diff --git a/.codecov.yml b/.codecov.yml index a628d33cbec5..326dd3e0b29e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,13 +4,6 @@ # Can be validated via instructions at: # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml -# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed -# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage -# needs to be merged across those builds -codecov: - notify: - after_n_builds: 2 - # Settings related to code coverage analysis coverage: status: diff --git a/.dockerignore b/.dockerignore index 0e42960dc9c0..7d3bdc2b4b0d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,5 @@ dspace/modules/*/target/ Dockerfile.* dspace/src/main/docker/dspace-postgres-pgcrypto dspace/src/main/docker/dspace-postgres-pgcrypto-curl -dspace/src/main/docker/solr dspace/src/main/docker/README.md dspace/src/main/docker-compose/ diff --git a/.github/disabled-workflows/pull_request_opened.yml b/.github/disabled-workflows/pull_request_opened.yml deleted file mode 100644 index 0dc718c0b9a3..000000000000 --- a/.github/disabled-workflows/pull_request_opened.yml +++ /dev/null @@ -1,26 +0,0 @@ -# This workflow runs whenever a new pull request is created -# TEMPORARILY DISABLED. Unfortunately this doesn't work for PRs created from forked repositories (which is how we tend to create PRs). -# There is no known workaround yet. See https://github.community/t/how-to-use-github-token-for-prs-from-forks/16818 -name: Pull Request opened - -# Only run for newly opened PRs against the "main" branch -on: - pull_request: - types: [opened] - branches: - - main - -jobs: - automation: - runs-on: ubuntu-latest - steps: - # Assign the PR to whomever created it. 
This is useful for visualizing assignments on project boards - # See https://github.com/marketplace/actions/pull-request-assigner - - name: Assign PR to creator - uses: thomaseizinger/assign-pr-creator-action@v1.0.0 - # Note, this authentication token is created automatically - # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - # Ignore errors. It is possible the PR was created by someone who cannot be assigned - continue-on-error: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index b11e3cd531cf..5b3f4336e6a2 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,7 +1,7 @@ ## References _Add references/links to any related issues or PRs. These may include:_ -* Fixes #[issue-number] -* Related to [REST Contract](https://github.com/DSpace/Rest7Contract) +* Fixes #`issue-number` (if this fixes an issue ticket) +* Related to DSpace/RestContract#`pr-number` (if a corresponding REST Contract PR exists) ## Description Short summary of changes (1-2 sentences). @@ -22,5 +22,7 @@ _This checklist provides a reminder of what we are going to look for when review - [ ] My PR passes Checkstyle validation based on the [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide). - [ ] My PR includes Javadoc for _all new (or modified) public methods and classes_. It also includes Javadoc for large or complex private methods. - [ ] My PR passes all tests and includes new/updated Unit or Integration Tests based on the [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide). 
-- [ ] If my PR includes new, third-party dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. -- [ ] If my PR modifies the REST API, I've linked to the REST Contract page (or open PR) related to this change. +- [ ] If my PR includes new libraries/dependencies (in any `pom.xml`), I've made sure their licenses align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. +- [ ] If my PR modifies REST API endpoints, I've opened a separate [REST Contract](https://github.com/DSpace/RestContract/blob/main/README.md) PR related to this change. +- [ ] If my PR includes new configurations, I've provided basic technical documentation in the PR itself. +- [ ] If my PR fixes an issue ticket, I've [linked them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue). 
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 67ba6213e332..a2a0d6294f65 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -6,72 +6,106 @@ name: Build # Run this Build for all pushes / PRs to current branch on: [push, pull_request] +permissions: + contents: read # to fetch code (actions/checkout) + jobs: tests: runs-on: ubuntu-latest env: # Give Maven 1GB of memory to work with - # Suppress all Maven "downloading" messages in Travis logs (see https://stackoverflow.com/a/35653426) - # This also slightly speeds builds, as there is less logging - MAVEN_OPTS: "-Xmx1024M -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn" + MAVEN_OPTS: "-Xmx1024M" strategy: # Create a matrix of two separate configurations for Unit vs Integration Tests # This will ensure those tasks are run in parallel + # Also specify version of Java to use (this can allow us to optionally run tests on multiple JDKs in future) matrix: include: - # NOTE: Unit Tests include deprecated REST API v6 (as it has unit tests) + # NOTE: Unit Tests include a retry for occasionally failing tests + # - surefire.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries - type: "Unit Tests" - mvnflags: "-DskipUnitTests=false -Pdspace-rest" + java: 11 + mvnflags: "-DskipUnitTests=false -Dsurefire.rerunFailingTestsCount=2" resultsdir: "**/target/surefire-reports/**" # NOTE: ITs skip all code validation checks, as they are already done by Unit Test job. 
# - enforcer.skip => Skip maven-enforcer-plugin rules # - checkstyle.skip => Skip all checkstyle checks by maven-checkstyle-plugin # - license.skip => Skip all license header checks by license-maven-plugin # - xml.skip => Skip all XML/XSLT validation by xml-maven-plugin + # - failsafe.rerunFailingTestsCount => try again for flakey tests, and keep track of/report on number of retries - type: "Integration Tests" - mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true" + java: 11 + mvnflags: "-DskipIntegrationTests=false -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true -Dfailsafe.rerunFailingTestsCount=2" resultsdir: "**/target/failsafe-reports/**" # Do NOT exit immediately if one matrix job fails # This ensures ITs continue running even if Unit Tests fail, or visa versa fail-fast: false + name: Run ${{ matrix.type }} # These are the actual CI steps to perform per job steps: # https://github.com/actions/checkout - name: Checkout codebase - uses: actions/checkout@v1 + uses: actions/checkout@v4 # https://github.com/actions/setup-java - - name: Install JDK 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - # https://github.com/actions/cache - - name: Cache Maven dependencies - uses: actions/cache@v2 + - name: Install JDK ${{ matrix.java }} + uses: actions/setup-java@v3 with: - # Cache entire ~/.m2/repository - path: ~/.m2/repository - # Cache key is hash of all pom.xml files. 
Therefore any changes to POMs will invalidate cache - key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: ${{ runner.os }}-maven- + java-version: ${{ matrix.java }} + distribution: 'temurin' + cache: 'maven' # Run parallel Maven builds based on the above 'strategy.matrix' - name: Run Maven ${{ matrix.type }} env: TEST_FLAGS: ${{ matrix.mvnflags }} - run: mvn install -B -V -P-assembly -Pcoverage-report $TEST_FLAGS + run: mvn --no-transfer-progress -V install -P-assembly -Pcoverage-report $TEST_FLAGS # If previous step failed, save results of tests to downloadable artifact for this job # (This artifact is downloadable at the bottom of any job's summary page) - name: Upload Results of ${{ matrix.type }} to Artifact if: ${{ failure() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: ${{ matrix.type }} results path: ${{ matrix.resultsdir }} - retention-days: 7 - # https://github.com/codecov/codecov-action + # Upload code coverage report to artifact, so that it can be shared with the 'codecov' job (see below) + - name: Upload code coverage report to Artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.type }} coverage report + path: 'dspace/target/site/jacoco-aggregate/jacoco.xml' + retention-days: 14 + + # Codecov upload is a separate job in order to allow us to restart this separate from the entire build/test + # job above. This is necessary because Codecov uploads seem to randomly fail at times. + # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954 + codecov: + # Must run after 'tests' job above + needs: tests + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + # Download artifacts from previous 'tests' job + - name: Download coverage artifacts + uses: actions/download-artifact@v3 + + # Now attempt upload to Codecov using its action. 
+ # NOTE: We use a retry action to retry the Codecov upload if it fails the first time. + # + # Retry action: https://github.com/marketplace/actions/retry-action + # Codecov action: https://github.com/codecov/codecov-action - name: Upload coverage to Codecov.io - uses: codecov/codecov-action@v1 + uses: Wandalen/wretry.action@v1.3.0 + with: + action: codecov/codecov-action@v3 + # Ensure codecov-action throws an error when it fails to upload + with: | + fail_ci_if_error: true + # Try re-running action 5 times max + attempt_limit: 5 + # Run again in 30 seconds + attempt_delay: 30000 diff --git a/.github/workflows/codescan.yml b/.github/workflows/codescan.yml new file mode 100644 index 000000000000..13bb0d2278ad --- /dev/null +++ b/.github/workflows/codescan.yml @@ -0,0 +1,63 @@ +# DSpace CodeQL code scanning configuration for GitHub +# https://docs.github.com/en/code-security/code-scanning +# +# NOTE: Code scanning must be run separate from our default build.yml +# because CodeQL requires a fresh build with all tests *disabled*. +name: "Code Scanning" + +# Run this code scan for all pushes / PRs to main or maintenance branches. Also run once a week. +on: + push: + branches: + - main + - 'dspace-**' + pull_request: + branches: + - main + - 'dspace-**' + # Don't run if PR is only updating static documentation + paths-ignore: + - '**/*.md' + - '**/*.txt' + schedule: + - cron: "37 0 * * 1" + +jobs: + analyze: + name: Analyze Code + runs-on: ubuntu-latest + # Limit permissions of this GitHub action. Can only write to security-events + permissions: + actions: read + contents: read + security-events: write + + steps: + # https://github.com/actions/checkout + - name: Checkout repository + uses: actions/checkout@v4 + + # https://github.com/actions/setup-java + - name: Install JDK + uses: actions/setup-java@v3 + with: + java-version: 11 + distribution: 'temurin' + + # Initializes the CodeQL tools for scanning. 
+ # https://github.com/github/codeql-action + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + # Codescan Javascript as well since a few JS files exist in REST API's interface + languages: java, javascript + + # Autobuild attempts to build any compiled languages + # NOTE: Based on testing, this autobuild process works well for DSpace. A custom + # DSpace build w/caching (like in build.yml) was about the same speed as autobuild. + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # Perform GitHub Code Scanning. + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 000000000000..9f1e407cff4b --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,150 @@ +# DSpace Docker image build for hub.docker.com +name: Docker images + +# Run this Build for all pushes to 'main' or maintenance branches, or tagged releases. +# Also run for PRs to ensure PR doesn't break Docker build process +# NOTE: uses "reusable-docker-build.yml" to actually build each of the Docker images. +on: + push: + branches: + - main + - 'dspace-**' + tags: + - 'dspace-**' + pull_request: + +permissions: + contents: read # to fetch code (actions/checkout) + +jobs: + #################################################### + # Build/Push the 'dspace/dspace-dependencies' image. + # This image is used by all other DSpace build jobs. + #################################################### + dspace-dependencies: + # Ensure this job never runs on forked repos. 
It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace-dependencies + image_name: dspace/dspace-dependencies + dockerfile_path: ./Dockerfile.dependencies + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + ####################################### + # Build/Push the 'dspace/dspace' image + ####################################### + dspace: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace + image_name: dspace/dspace + dockerfile_path: ./Dockerfile + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Enable redeploy of sandbox & demo if the branch for this image matches the deployment branch of + # these sites as specified in reusable-docker-build.xml + REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }} + REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }} + + ############################################################# + # Build/Push the 'dspace/dspace' image ('-test' tag) + ############################################################# + dspace-test: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace-test + image_name: dspace/dspace + dockerfile_path: ./Dockerfile.test + # As this is a test/development image, its tags are all suffixed with "-test". Otherwise, it uses the same + # tagging logic as the primary 'dspace/dspace' image above. 
+ tags_flavor: suffix=-test + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + ########################################### + # Build/Push the 'dspace/dspace-cli' image + ########################################### + dspace-cli: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + # Must run after 'dspace-dependencies' job above + needs: dspace-dependencies + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace-cli + image_name: dspace/dspace-cli + dockerfile_path: ./Dockerfile.cli + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + ########################################### + # Build/Push the 'dspace/dspace-solr' image + ########################################### + dspace-solr: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace-solr + image_name: dspace/dspace-solr + dockerfile_path: ./dspace/src/main/docker/dspace-solr/Dockerfile + # Must pass solrconfigs to the Dockerfile so that it can find the required Solr config files + dockerfile_additional_contexts: 'solrconfigs=./dspace/solr/' + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} + # Enable redeploy of sandbox & demo SOLR instance whenever dspace-solr image changes for deployed branch. + # These URLs MUST use different secrets than 'dspace/dspace' image build above as they are deployed separately. 
+ REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_SOLR_URL }} + REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_SOLR_URL }} + + ########################################################### + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image + ########################################################### + dspace-postgres-pgcrypto: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace-postgres-pgcrypto + image_name: dspace/dspace-postgres-pgcrypto + # Must build out of subdirectory to have access to install script for pgcrypto. + # NOTE: this context will build the image based on the Dockerfile in the specified directory + dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + ######################################################################## + # Build/Push the 'dspace/dspace-postgres-pgcrypto' image (-loadsql tag) + ######################################################################## + dspace-postgres-pgcrypto-loadsql: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' + uses: ./.github/workflows/reusable-docker-build.yml + with: + build_id: dspace-postgres-pgcrypto-loadsql + image_name: dspace/dspace-postgres-pgcrypto + # Must build out of subdirectory to have access to install script for pgcrypto. + # NOTE: this context will build the image based on the Dockerfile in the specified directory + dockerfile_context: ./dspace/src/main/docker/dspace-postgres-pgcrypto-curl/ + # Suffix all tags with "-loadsql". Otherwise, it uses the same + # tagging logic as the primary 'dspace/dspace-postgres-pgcrypto' image above. 
+ tags_flavor: suffix=-loadsql + secrets: + DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} + DOCKER_ACCESS_TOKEN: ${{ secrets.DOCKER_ACCESS_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/issue_opened.yml b/.github/workflows/issue_opened.yml index 3ccdd22a0ddd..b4436dca3aad 100644 --- a/.github/workflows/issue_opened.yml +++ b/.github/workflows/issue_opened.yml @@ -5,25 +5,22 @@ on: issues: types: [opened] +permissions: {} jobs: automation: runs-on: ubuntu-latest steps: # Add the new issue to a project board, if it needs triage - # See https://github.com/marketplace/actions/create-project-card-action - - name: Add issue to project board + # See https://github.com/actions/add-to-project + - name: Add issue to triage board # Only add to project board if issue is flagged as "needs triage" or has no labels # NOTE: By default we flag new issues as "needs triage" in our issue template if: (contains(github.event.issue.labels.*.name, 'needs triage') || join(github.event.issue.labels.*.name) == '') - uses: technote-space/create-project-card-action@v1 + uses: actions/add-to-project@v0.5.0 # Note, the authentication token below is an ORG level Secret. - # It must be created/recreated manually via a personal access token with "public_repo" and "admin:org" permissions + # It must be created/recreated manually via a personal access token with admin:org, project, public_repo permissions # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#permissions-for-the-github_token # This is necessary because the "DSpace Backlog" project is an org level project (i.e. not repo specific) with: - GITHUB_TOKEN: ${{ secrets.ORG_PROJECT_TOKEN }} - PROJECT: DSpace Backlog - COLUMN: Triage - CHECK_ORG_PROJECT: true - # Ignore errors. 
- continue-on-error: true + github-token: ${{ secrets.TRIAGE_PROJECT_TOKEN }} + project-url: https://github.com/orgs/DSpace/projects/24 diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml index dcbab18f1b57..a023f4eef246 100644 --- a/.github/workflows/label_merge_conflicts.yml +++ b/.github/workflows/label_merge_conflicts.yml @@ -1,25 +1,39 @@ # This workflow checks open PRs for merge conflicts and labels them when conflicts are found name: Check for merge conflicts -# Run whenever the "main" branch is updated -# NOTE: This means merge conflicts are only checked for when a PR is merged to main. +# Run this for all pushes (i.e. merges) to 'main' or maintenance branches on: push: branches: - main + - 'dspace-**' + # So that the `conflict_label_name` is removed if conflicts are resolved, + # we allow this to run for `pull_request_target` so that github secrets are available. + pull_request_target: + types: [ synchronize ] + +permissions: {} jobs: triage: + # Ensure this job never runs on forked repos. It's only executed for 'dspace/dspace' + if: github.repository == 'dspace/dspace' runs-on: ubuntu-latest + permissions: + pull-requests: write steps: - # See: https://github.com/mschilde/auto-label-merge-conflicts/ + # See: https://github.com/prince-chrismc/label-merge-conflicts-action - name: Auto-label PRs with merge conflicts - uses: mschilde/auto-label-merge-conflicts@v2.0 + uses: prince-chrismc/label-merge-conflicts-action@v3 + # Ignore any failures -- may occur (randomly?) for older, outdated PRs. + continue-on-error: true # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved. 
# Note, the authentication token is created automatically # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token with: - CONFLICT_LABEL_NAME: 'merge conflict' - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # Ignore errors - continue-on-error: true + conflict_label_name: 'merge conflict' + github_token: ${{ secrets.GITHUB_TOKEN }} + conflict_comment: | + Hi @${author}, + Conflicts have been detected against the base branch. + Please [resolve these conflicts](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/about-merge-conflicts) as soon as you can. Thanks! diff --git a/.github/workflows/port_merged_pull_request.yml b/.github/workflows/port_merged_pull_request.yml new file mode 100644 index 000000000000..857f22755e49 --- /dev/null +++ b/.github/workflows/port_merged_pull_request.yml @@ -0,0 +1,46 @@ +# This workflow will attempt to port a merged pull request to +# the branch specified in a "port to" label (if exists) +name: Port merged Pull Request + +# Only run for merged PRs against the "main" or maintenance branches +# We allow this to run for `pull_request_target` so that github secrets are available +# (This is required when the PR comes from a forked repo) +on: + pull_request_target: + types: [ closed ] + branches: + - main + - 'dspace-**' + +permissions: + contents: write # so action can add comments + pull-requests: write # so action can create pull requests + +jobs: + port_pr: + runs-on: ubuntu-latest + # Don't run on closed *unmerged* pull requests + if: github.event.pull_request.merged + steps: + # Checkout code + - uses: actions/checkout@v4 + # Port PR to other branch (ONLY if labeled with "port to") + # See https://github.com/korthout/backport-action + - name: Create backport pull requests + uses: korthout/backport-action@v2 + with: + # Trigger based on a "port to [branch]" label on PR + # (This label must specify the branch name to port to) + 
label_pattern: '^port to ([^ ]+)$' + # Title to add to the (newly created) port PR + pull_title: '[Port ${target_branch}] ${pull_title}' + # Description to add to the (newly created) port PR + pull_description: 'Port of #${pull_number} by @${pull_author} to `${target_branch}`.' + # Copy all labels from original PR to (newly created) port PR + # NOTE: The labels matching 'label_pattern' are automatically excluded + copy_labels_pattern: '.*' + # Skip any merge commits in the ported PR. This means only non-merge commits are cherry-picked to the new PR + merge_commits: 'skip' + # Use a personal access token (PAT) to create PR as 'dspace-bot' user. + # A PAT is required in order for the new PR to trigger its own actions (for CI checks) + github_token: ${{ secrets.PR_PORT_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/pull_request_opened.yml b/.github/workflows/pull_request_opened.yml new file mode 100644 index 000000000000..f16e81c9fd25 --- /dev/null +++ b/.github/workflows/pull_request_opened.yml @@ -0,0 +1,24 @@ +# This workflow runs whenever a new pull request is created +name: Pull Request opened + +# Only run for newly opened PRs against the "main" or maintenance branches +# We allow this to run for `pull_request_target` so that github secrets are available +# (This is required to assign a PR back to the creator when the PR comes from a forked repo) +on: + pull_request_target: + types: [ opened ] + branches: + - main + - 'dspace-**' + +permissions: + pull-requests: write + +jobs: + automation: + runs-on: ubuntu-latest + steps: + # Assign the PR to whomever created it. 
This is useful for visualizing assignments on project boards + # See https://github.com/toshimaru/auto-author-assign + - name: Assign PR to creator + uses: toshimaru/auto-author-assign@v2.0.1 diff --git a/.github/workflows/reusable-docker-build.yml b/.github/workflows/reusable-docker-build.yml new file mode 100644 index 000000000000..aa8327f4d11b --- /dev/null +++ b/.github/workflows/reusable-docker-build.yml @@ -0,0 +1,225 @@ +# +# DSpace's reusable Docker build/push workflow. +# +# This is used by docker.yml for all Docker image builds +name: Reusable DSpace Docker Build + +on: + workflow_call: + # Possible Inputs to this reusable job + inputs: + # Build name/id for this Docker build. Used for digest storage to avoid digest overlap between builds. + build_id: + required: true + type: string + # Requires the image name to build (e.g dspace/dspace-test) + image_name: + required: true + type: string + # Optionally the path to the Dockerfile to use for the build. (Default is [dockerfile_context]/Dockerfile) + dockerfile_path: + required: false + type: string + # Optionally the context directory to build the Dockerfile within. Defaults to "." (current directory) + dockerfile_context: + required: false + type: string + default: '.' + # Optionally a list of "additional_contexts" to pass to Dockerfile. Defaults to empty + dockerfile_additional_contexts: + required: false + type: string + default: '' + # If Docker image should have additional tag flavor details (e.g. a suffix), it may be passed in. + tags_flavor: + required: false + type: string + secrets: + # Requires that Docker login info be passed in as secrets. + DOCKER_USERNAME: + required: true + DOCKER_ACCESS_TOKEN: + required: true + # These URL secrets are optional. When specified & branch checks match, the redeployment code below will trigger. + # Therefore builds which need to trigger redeployment MUST specify these URLs. All others should leave them empty. 
+ REDEPLOY_SANDBOX_URL: + required: false + REDEPLOY_DEMO_URL: + required: false + +# Define shared default settings as environment variables +env: + IMAGE_NAME: ${{ inputs.image_name }} + # Define tags to use for Docker images based on Git tags/branches (for docker/metadata-action) + # For a new commit on default branch (main), use the literal tag 'latest' on Docker image. + # For a new commit on other branches, use the branch name as the tag for Docker image. + # For a new tag, copy that tag name as the tag for Docker image. + IMAGE_TAGS: | + type=raw,value=latest,enable=${{ github.ref_name == github.event.repository.default_branch }} + type=ref,event=branch,enable=${{ github.ref_name != github.event.repository.default_branch }} + type=ref,event=tag + # Define default tag "flavor" for docker/metadata-action per + # https://github.com/docker/metadata-action#flavor-input + # We manage the 'latest' tag ourselves to the 'main' branch (see settings above) + TAGS_FLAVOR: | + latest=false + ${{ inputs.tags_flavor }} + # When these URL variables are specified & required branch matches, then the sandbox or demo site will be redeployed. + # See "Redeploy" steps below for more details. + REDEPLOY_SANDBOX_URL: ${{ secrets.REDEPLOY_SANDBOX_URL }} + REDEPLOY_DEMO_URL: ${{ secrets.REDEPLOY_DEMO_URL }} + # Current DSpace maintenance branch (and architecture) which is deployed to demo.dspace.org / sandbox.dspace.org + # (NOTE: No deployment branch specified for sandbox.dspace.org as it uses the default_branch) + DEPLOY_DEMO_BRANCH: 'dspace-7_x' + DEPLOY_ARCH: 'linux/amd64' + +jobs: + docker-build: + + strategy: + matrix: + # Architectures / Platforms for which we will build Docker images + arch: [ 'linux/amd64', 'linux/arm64' ] + os: [ ubuntu-latest ] + isPr: + - ${{ github.event_name == 'pull_request' }} + # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work. 
+ # The below exclude therefore ensures we do NOT build ARM64 for PRs. + exclude: + - isPr: true + os: ubuntu-latest + arch: linux/arm64 + + runs-on: ${{ matrix.os }} + + steps: + # https://github.com/actions/checkout + - name: Checkout codebase + uses: actions/checkout@v4 + + # https://github.com/docker/setup-buildx-action + - name: Setup Docker Buildx + uses: docker/setup-buildx-action@v3 + + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v3 + + # https://github.com/docker/login-action + - name: Login to DockerHub + # Only login if not a PR, as PRs only trigger a Docker build and not a push + if: ${{ ! matrix.isPr }} + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + # https://github.com/docker/metadata-action + # Get Metadata for docker_build_deps step below + - name: Sync metadata (tags, labels) from GitHub to Docker for image + id: meta_build + uses: docker/metadata-action@v5 + with: + images: ${{ env.IMAGE_NAME }} + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + # https://github.com/docker/build-push-action + - name: Build and push image + id: docker_build + uses: docker/build-push-action@v5 + with: + build-contexts: | + ${{ inputs.dockerfile_additional_contexts }} + context: ${{ inputs.dockerfile_context }} + file: ${{ inputs.dockerfile_path }} + platforms: ${{ matrix.arch }} + # For pull requests, we run the Docker build (to ensure no PR changes break the build), + # but we ONLY do an image push to DockerHub if it's NOT a PR + push: ${{ ! matrix.isPr }} + # Use tags / labels provided by 'docker/metadata-action' above + tags: ${{ steps.meta_build.outputs.tags }} + labels: ${{ steps.meta_build.outputs.labels }} + + # Export the digest of Docker build locally (for non PRs only) + - name: Export Docker build digest + if: ${{ ! 
matrix.isPr }} + run: | + mkdir -p /tmp/digests + digest="${{ steps.docker_build.outputs.digest }}" + touch "/tmp/digests/${digest#sha256:}" + + # Upload digest to an artifact, so that it can be used in manifest below + - name: Upload Docker build digest to artifact + if: ${{ ! matrix.isPr }} + uses: actions/upload-artifact@v3 + with: + name: digests-${{ inputs.build_id }} + path: /tmp/digests/* + if-no-files-found: error + retention-days: 1 + + # If this build is NOT a PR and passed in a REDEPLOY_SANDBOX_URL secret, + # Then redeploy https://sandbox.dspace.org if this build is for our deployment architecture and 'main' branch. + - name: Redeploy sandbox.dspace.org (based on main branch) + if: | + !matrix.isPR && + env.REDEPLOY_SANDBOX_URL != '' && + matrix.arch == env.DEPLOY_ARCH && + github.ref_name == github.event.repository.default_branch + run: | + curl -X POST $REDEPLOY_SANDBOX_URL + + # If this build is NOT a PR and passed in a REDEPLOY_DEMO_URL secret, + # Then redeploy https://demo.dspace.org if this build is for our deployment architecture and demo branch. + - name: Redeploy demo.dspace.org (based on maintenace branch) + if: | + !matrix.isPR && + env.REDEPLOY_DEMO_URL != '' && + matrix.arch == env.DEPLOY_ARCH && + github.ref_name == env.DEPLOY_DEMO_BRANCH + run: | + curl -X POST $REDEPLOY_DEMO_URL + + # Merge Docker digests (from various architectures) into a manifest. + # This runs after all Docker builds complete above, and it tells hub.docker.com + # that these builds should be all included in the manifest for this tag. + # (e.g. 
AMD64 and ARM64 should be listed as options under the same tagged Docker image) + docker-build_manifest: + if: ${{ github.event_name != 'pull_request' }} + runs-on: ubuntu-latest + needs: + - docker-build + steps: + - name: Download Docker build digests + uses: actions/download-artifact@v3 + with: + name: digests-${{ inputs.build_id }} + path: /tmp/digests + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Add Docker metadata for image + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.IMAGE_NAME }} + tags: ${{ env.IMAGE_TAGS }} + flavor: ${{ env.TAGS_FLAVOR }} + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + - name: Create manifest list from digests and push + working-directory: /tmp/digests + run: | + docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ + $(printf '${{ env.IMAGE_NAME }}@sha256:%s ' *) + + - name: Inspect image + run: | + docker buildx imagetools inspect ${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }} diff --git a/.gitignore b/.gitignore index fc8dab2da940..2fcb46b9932c 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,7 @@ nb-configuration.xml ##Ignore JRebel project configuration rebel.xml + + +## Ignore jenv configuration +.java-version diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000000..45a6af9ce5a3 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,45 @@ +# How to Contribute + +DSpace is a community built and supported project. We do not have a centralized development or support team, but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc. 
+ +* [Contribute new code via a Pull Request](#contribute-new-code-via-a-pull-request) +* [Contribute documentation](#contribute-documentation) +* [Help others on mailing lists or Slack](#help-others-on-mailing-lists-or-slack) +* [Join a working or interest group](#join-a-working-or-interest-group) + +## Contribute new code via a Pull Request + +We accept [GitHub Pull Requests (PRs)](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork) at any time from anyone. +Contributors to each release are recognized in our [Release Notes](https://wiki.lyrasis.org/display/DSDOC7x/Release+Notes). + +Code Contribution Checklist +- [ ] PRs _should_ be smaller in size (ideally less than 1,000 lines of code, not including comments & tests) +- [ ] PRs **must** pass Checkstyle validation based on our [Code Style Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Style+Guide). +- [ ] PRs **must** include Javadoc for _all new/modified public methods and classes_. Larger private methods should also have Javadoc +- [ ] PRs **must** pass all automated tests and include new/updated Unit or Integration tests based on our [Code Testing Guide](https://wiki.lyrasis.org/display/DSPACE/Code+Testing+Guide). +- [ ] If a PR includes new libraries/dependencies (in any `pom.xml`), then their software licenses **must** align with the [DSpace BSD License](https://github.com/DSpace/DSpace/blob/main/LICENSE) based on the [Licensing of Contributions](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines#CodeContributionGuidelines-LicensingofContributions) documentation. +- [ ] Basic technical documentation _should_ be provided for any new features or changes to the REST API. REST API changes should be documented in our [Rest Contract](https://github.com/DSpace/RestContract). 
+- [ ] If a PR fixes an issue ticket, please [link them together](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue). + +Additional details on the code contribution process can be found in our [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines) + +## Contribute documentation + +DSpace Documentation is a collaborative effort in a shared Wiki. The latest documentation is at https://wiki.lyrasis.org/display/DSDOC7x + +If you find areas of the DSpace Documentation which you wish to improve, please request a Wiki account by emailing wikihelp@lyrasis.org. +Once you have an account setup, contact @tdonohue (via [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) or email) for access to edit our Documentation. + +## Help others on mailing lists or Slack + +DSpace has our own [Slack](https://wiki.lyrasis.org/display/DSPACE/Slack) community and [Mailing Lists](https://wiki.lyrasis.org/display/DSPACE/Mailing+Lists) where discussions take place and questions are answered. +Anyone is welcome to join and help others. We just ask you to follow our [Code of Conduct](https://www.lyrasis.org/about/Pages/Code-of-Conduct.aspx) (adopted via LYRASIS). + +## Join a working or interest group + +Most of the work in building/improving DSpace comes via [Working Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Working+Groups) or [Interest Groups](https://wiki.lyrasis.org/display/DSPACE/DSpace+Interest+Groups). + +All working/interest groups are open to anyone to join and participate. A few key groups to be aware of include: + +* [DSpace 7 Working Group](https://wiki.lyrasis.org/display/DSPACE/DSpace+7+Working+Group) - This is the main (mostly volunteer) development team. We meet weekly to review our current development [project board](https://github.com/orgs/DSpace/projects), assigning tickets and/or PRs. 
+* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team) - This is an interest group for repository managers/administrators. We meet monthly to discuss DSpace, share tips & provide feedback back to developers. \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 2dc3ee9bda6c..5bcd68376887 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,63 +1,71 @@ # This image will be published as dspace/dspace # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # -# This version is JDK11 compatible -# - tomcat:8-jdk11 -# - ANT 1.10.7 -# - maven:3-jdk-11 (see dspace-dependencies) -# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x +# - note: default tag for branch: dspace/dspace: dspace/dspace:latest + +# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. +# To build with JDK17, use "--build-arg JDK_VERSION=17" +ARG JDK_VERSION=11 +ARG DSPACE_VERSION=latest # Step 1 - Run Maven Build -FROM dspace/dspace-dependencies:dspace-7_x as build +FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build ARG TARGET_DIR=dspace-installer WORKDIR /app - -# The dspace-install directory will be written to /install +# The dspace-installer directory will be written to /install RUN mkdir /install \ && chown -Rv dspace: /install \ && chown -Rv dspace: /app - USER dspace - -# Copy the DSpace source code into the workdir (excluding .dockerignore contents) +# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) ADD --chown=dspace . /app/ -COPY dspace/src/main/docker/local.cfg /app/local.cfg - -# Build DSpace (note: this build doesn't include the optional, deprecated "dspace-rest" webapp) -# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small -RUN mvn package && \ +# Build DSpace +# Copy the dspace-installer directory to /install. 
Clean up the build to keep the docker image small +# Maven flags here ensure that we skip building test environment and skip all code verification checks. +# These flags speed up this compilation as much as reasonably possible. +ENV MAVEN_FLAGS="-P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true" +RUN mvn --no-transfer-progress package ${MAVEN_FLAGS} && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \ mvn clean # Step 2 - Run Ant Deploy -FROM tomcat:8-jdk11 as ant_build +FROM openjdk:${JDK_VERSION}-slim as ant_build ARG TARGET_DIR=dspace-installer +# COPY the /install directory from 'build' container to /dspace-src in this container COPY --from=build /install /dspace-src WORKDIR /dspace-src - # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.7 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH - +# Need wget to install ant +RUN apt-get update \ + && apt-get install -y --no-install-recommends wget \ + && apt-get purge -y --auto-remove \ + && rm -rf /var/lib/apt/lists/* +# Download and install 'ant' RUN mkdir $ANT_HOME && \ wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME - +# Run necessary 'ant' deploy scripts RUN ant init_installation update_configs update_code update_webapps # Step 3 - Run tomcat # Create a new tomcat image that does not retain the the build directory contents -FROM tomcat:8-jdk11 +FROM tomcat:9-jdk${JDK_VERSION} +# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. ENV DSPACE_INSTALL=/dspace +# Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL +# Expose Tomcat port and AJP port EXPOSE 8080 8009 - +# Give java extra memory (2GB) ENV JAVA_OPTS=-Xmx2000m -# Run the "server" webapp off the /server path (e.g. 
http://localhost:8080/server/) +# Link the DSpace 'server' webapp into Tomcat's webapps directory. +# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/) RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server # If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN. -# You also MUST update the URL in dspace/src/main/docker/local.cfg +# You also MUST update the 'dspace.server.url' configuration to match. # Please note that server webapp should only run on one path at a time. #RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \ # ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT diff --git a/Dockerfile.cli b/Dockerfile.cli index d4204ebdd073..d54978375e54 100644 --- a/Dockerfile.cli +++ b/Dockerfile.cli @@ -1,53 +1,55 @@ # This image will be published as dspace/dspace-cli # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # -# This version is JDK11 compatible -# - openjdk:11 -# - ANT 1.10.7 -# - maven:3-jdk-11 (see dspace-dependencies) -# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:dspace-7_x +# - note: default tag for branch: dspace/dspace-cli: dspace/dspace-cli:latest + +# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. 
+# To build with JDK17, use "--build-arg JDK_VERSION=17" +ARG JDK_VERSION=11 +ARG DSPACE_VERSION=latest # Step 1 - Run Maven Build -FROM dspace/dspace-dependencies:dspace-7_x as build +FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build ARG TARGET_DIR=dspace-installer WORKDIR /app - -# The dspace-install directory will be written to /install +# The dspace-installer directory will be written to /install RUN mkdir /install \ && chown -Rv dspace: /install \ && chown -Rv dspace: /app - USER dspace - -# Copy the DSpace source code into the workdir (excluding .dockerignore contents) +# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) ADD --chown=dspace . /app/ -COPY dspace/src/main/docker/local.cfg /app/local.cfg - -# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small -RUN mvn package && \ +# Build DSpace. Copy the dspace-installer directory to /install. Clean up the build to keep the docker image small +RUN mvn --no-transfer-progress package && \ mv /app/dspace/target/${TARGET_DIR}/* /install && \ mvn clean # Step 2 - Run Ant Deploy -FROM openjdk:11 as ant_build +FROM openjdk:${JDK_VERSION}-slim as ant_build ARG TARGET_DIR=dspace-installer +# COPY the /install directory from 'build' container to /dspace-src in this container COPY --from=build /install /dspace-src WORKDIR /dspace-src - # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.7 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH - +# Need wget to install ant, and unzip for managing AIPs +RUN apt-get update \ + && apt-get install -y --no-install-recommends wget unzip \ + && apt-get purge -y --auto-remove \ + && rm -rf /var/lib/apt/lists/* +# Download and install 'ant' RUN mkdir $ANT_HOME && \ wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME - +# Run 
necessary 'ant' deploy scripts RUN ant init_installation update_configs update_code # Step 3 - Run jdk -# Create a new tomcat image that does not retain the the build directory contents -FROM openjdk:11 +FROM openjdk:${JDK_VERSION} +# NOTE: DSPACE_INSTALL must align with the "dspace.dir" default configuration. ENV DSPACE_INSTALL=/dspace +# Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL - +# Give java extra memory (1GB) ENV JAVA_OPTS=-Xmx1000m diff --git a/Dockerfile.dependencies b/Dockerfile.dependencies index 54647ebad1e7..6f72ab058536 100644 --- a/Dockerfile.dependencies +++ b/Dockerfile.dependencies @@ -1,27 +1,34 @@ # This image will be published as dspace/dspace-dependencies # The purpose of this image is to make the build for dspace/dspace run faster # -# This version is JDK11 compatible -# - maven:3-jdk-11 + +# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. +# To build with JDK17, use "--build-arg JDK_VERSION=17" +ARG JDK_VERSION=11 # Step 1 - Run Maven Build -FROM maven:3-jdk-11 as build +FROM maven:3-openjdk-${JDK_VERSION}-slim as build ARG TARGET_DIR=dspace-installer WORKDIR /app - +# Create the 'dspace' user account & home directory RUN useradd dspace \ - && mkdir /home/dspace \ + && mkdir -p /home/dspace \ && chown -Rv dspace: /home/dspace +RUN chown -Rv dspace: /app + +# Switch to dspace user & run below commands as that user USER dspace -# Copy the DSpace source code into the workdir (excluding .dockerignore contents) +# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) ADD --chown=dspace . 
/app/ -COPY dspace/src/main/docker/local.cfg /app/local.cfg -# Trigger the installation of all maven dependencies -RUN mvn package +# Trigger the installation of all maven dependencies (hide download progress messages) +# Maven flags here ensure that we skip final assembly, skip building test environment and skip all code verification checks. +# These flags speed up this installation as much as reasonably possible. +ENV MAVEN_FLAGS="-P-assembly -P-test-environment -Denforcer.skip=true -Dcheckstyle.skip=true -Dlicense.skip=true -Dxml.skip=true" +RUN mvn --no-transfer-progress install ${MAVEN_FLAGS} # Clear the contents of the /app directory (including all maven builds), so no artifacts remain. -# This ensures when dspace:dspace is built, it will just the Maven local cache (.m2) for dependencies +# This ensures when dspace:dspace is built, it will use the Maven local cache (~/.m2) for dependencies USER root RUN rm -rf /app/* diff --git a/Dockerfile.test b/Dockerfile.test index f8d124b3ae32..6fcc4eda6be1 100644 --- a/Dockerfile.test +++ b/Dockerfile.test @@ -1,77 +1,76 @@ # This image will be published as dspace/dspace # See https://github.com/DSpace/DSpace/tree/main/dspace/src/main/docker for usage details # -# This version is JDK11 compatible -# - tomcat:8-jdk11 -# - ANT 1.10.7 -# - maven:3-jdk-11 (see dspace-dependencies) -# - note: default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-test +# - note: default tag for branch: dspace/dspace: dspace/dspace:latest-test # # This image is meant for TESTING/DEVELOPMENT ONLY as it deploys the old v6 REST API under HTTP (not HTTPS) +# This Dockerfile uses JDK11 by default, but has also been tested with JDK17. 
+# To build with JDK17, use "--build-arg JDK_VERSION=17" +ARG JDK_VERSION=11 +ARG DSPACE_VERSION=latest + # Step 1 - Run Maven Build -FROM dspace/dspace-dependencies:dspace-7_x as build +FROM dspace/dspace-dependencies:${DSPACE_VERSION} as build ARG TARGET_DIR=dspace-installer WORKDIR /app - -# The dspace-install directory will be written to /install +# The dspace-installer directory will be written to /install RUN mkdir /install \ && chown -Rv dspace: /install \ && chown -Rv dspace: /app - USER dspace - -# Copy the DSpace source code into the workdir (excluding .dockerignore contents) +# Copy the DSpace source code (from local machine) into the workdir (excluding .dockerignore contents) ADD --chown=dspace . /app/ -COPY dspace/src/main/docker/local.cfg /app/local.cfg - -# Build DSpace (including the optional, deprecated "dspace-rest" webapp) -# Copy the dspace-install directory to /install. Clean up the build to keep the docker image small -RUN mvn package -Pdspace-rest && \ +# Build DSpace +# Copy the dspace-installer directory to /install. 
Clean up the build to keep the docker image small +RUN mvn --no-transfer-progress package && \ + mv /app/dspace/target/${TARGET_DIR}/* /install && \ + mvn clean # Step 2 - Run Ant Deploy -FROM tomcat:8-jdk11 as ant_build +FROM openjdk:${JDK_VERSION}-slim as ant_build ARG TARGET_DIR=dspace-installer +# COPY the /install directory from 'build' container to /dspace-src in this container COPY --from=build /install /dspace-src WORKDIR /dspace-src - # Create the initial install deployment using ANT -ENV ANT_VERSION 1.10.7 +ENV ANT_VERSION 1.10.13 ENV ANT_HOME /tmp/ant-$ANT_VERSION ENV PATH $ANT_HOME/bin:$PATH - +# Need wget to install ant +RUN apt-get update \ + && apt-get install -y --no-install-recommends wget \ + && apt-get purge -y --auto-remove \ + && rm -rf /var/lib/apt/lists/* +# Download and install 'ant' RUN mkdir $ANT_HOME && \ wget -qO- "https://archive.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME - +# Run necessary 'ant' deploy scripts RUN ant init_installation update_configs update_code update_webapps # Step 3 - Run tomcat # Create a new tomcat image that does not retain the the build directory contents -FROM tomcat:8-jdk11 +FROM tomcat:9-jdk${JDK_VERSION} ENV DSPACE_INSTALL=/dspace ENV TOMCAT_INSTALL=/usr/local/tomcat +# Copy the /dspace directory from 'ant_build' container to /dspace in this container COPY --from=ant_build /dspace $DSPACE_INSTALL # Enable the AJP connector in Tomcat's server.xml # NOTE: secretRequired="false" should only be used when AJP is NOT accessible from an external network. 
But, secretRequired="true" isn't supported by mod_proxy_ajp until Apache 2.5 RUN sed -i '/Service name="Catalina".*/a \\n ' $TOMCAT_INSTALL/conf/server.xml # Expose Tomcat port and AJP port -EXPOSE 8080 8009 - +EXPOSE 8080 8009 8000 +# Give java extra memory (2GB) ENV JAVA_OPTS=-Xmx2000m +# Set up debugging +ENV CATALINA_OPTS=-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=*:8000 -# Run the "server" webapp off the /server path (e.g. http://localhost:8080/server/) -# and the v6.x (deprecated) REST API off the "/rest" path -RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server && \ - ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest +# Link the DSpace 'server' webapp into Tomcat's webapps directory. +# This ensures that when we start Tomcat, it runs from /server path (e.g. http://localhost:8080/server/) +RUN ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/server # If you wish to run "server" webapp off the ROOT path, then comment out the above RUN, and uncomment the below RUN. -# You also MUST update the URL in dspace/src/main/docker/local.cfg +# You also MUST update the 'dspace.server.url' configuration to match. # Please note that server webapp should only run on one path at a time. 
#RUN mv /usr/local/tomcat/webapps/ROOT /usr/local/tomcat/webapps/ROOT.bk && \ -# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT && \ -# ln -s $DSPACE_INSTALL/webapps/rest /usr/local/tomcat/webapps/rest +# ln -s $DSPACE_INSTALL/webapps/server /usr/local/tomcat/webapps/ROOT -# Overwrite the v6.x (deprecated) REST API's web.xml, so that we can run it on HTTP (defaults to requiring HTTPS) -COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml -RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml diff --git a/LICENSE b/LICENSE index f55d21fe42f7..b381f6d96818 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -DSpace source code BSD License: +BSD 3-Clause License Copyright (c) 2002-2021, LYRASIS. All rights reserved. @@ -13,13 +13,12 @@ notice, this list of conditions and the following disclaimer. notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. -- Neither the name DuraSpace nor the name of the DSpace Foundation -nor the names of its contributors may be used to endorse or promote -products derived from this software without specific prior written -permission. +- Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, @@ -29,11 +28,4 @@ OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -DAMAGE. - - -DSpace uses third-party libraries which may be distributed under -different licenses to the above. Information about these licenses -is detailed in the LICENSES_THIRD_PARTY file at the root of the source -tree. You must agree to the terms of these licenses, in addition to -the above DSpace source code license, in order to use this software. +DAMAGE. \ No newline at end of file diff --git a/LICENSE_HEADER b/LICENSE.header similarity index 100% rename from LICENSE_HEADER rename to LICENSE.header diff --git a/LICENSES_THIRD_PARTY b/LICENSES_THIRD_PARTY index 803920b0f211..e494c80c5d6e 100644 --- a/LICENSES_THIRD_PARTY +++ b/LICENSES_THIRD_PARTY @@ -15,174 +15,178 @@ PLEASE NOTE: Some dependencies may be listed under multiple licenses if they are dual-licensed. This is especially true of anything listed as "GNU General Public Library" below, as DSpace actually does NOT allow for any dependencies that are solely released under GPL terms. 
For more info see: -https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines +https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines --------------------------------------------------- - (MIT-style) netCDF C library license: - - * CDM core library (edu.ucar:cdm:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm) - * GRIB IOSP and Feature Collection (edu.ucar:grib:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/) - * HttpClient Wrappers (edu.ucar:httpservices:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/documentation.htm) - * netCDF-4 IOSP JNI connection to C library (edu.ucar:netcdf4:4.5.5 - http://www.unidata.ucar.edu/software/netcdf-java/netcdf4/) - * udunits (edu.ucar:udunits:4.5.5 - http://www.unidata.ucar.edu/software/udunits//) - - 3-Clause BSD License: - - * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) - - Apache License v2: - - * parso (com.epam:parso:2.0.11 - https://github.com/epam/parso) - - Apache License v2.0: - - * Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna) - - Apache License, 2.0: - - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - - Apache License, version 2.0: - - * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.3.2.Final - http://www.jboss.org) - Apache Software License, Version 2.0: * Ant-Contrib Tasks (ant-contrib:ant-contrib:1.0b3 - http://ant-contrib.sourceforge.net) - * AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.10.50 - https://aws.amazon.com/sdkforjava) - 
* AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.10.50 - https://aws.amazon.com/sdkforjava) - * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.10.50 - https://aws.amazon.com/sdkforjava) - * jcommander (com.beust:jcommander:1.78 - https://jcommander.org) + * AWS SDK for Java - Core (com.amazonaws:aws-java-sdk-core:1.12.261 - https://aws.amazon.com/sdkforjava) + * AWS Java SDK for AWS KMS (com.amazonaws:aws-java-sdk-kms:1.12.261 - https://aws.amazon.com/sdkforjava) + * AWS Java SDK for Amazon S3 (com.amazonaws:aws-java-sdk-s3:1.12.261 - https://aws.amazon.com/sdkforjava) + * JMES Path Query library (com.amazonaws:jmespath-java:1.12.261 - https://aws.amazon.com/sdkforjava) * HPPC Collections (com.carrotsearch:hppc:0.8.1 - http://labs.carrotsearch.com/hppc.html/hppc) + * com.drewnoakes:metadata-extractor (com.drewnoakes:metadata-extractor:2.18.0 - https://drewnoakes.com/code/exif/) + * parso (com.epam:parso:2.0.14 - https://github.com/epam/parso) + * Esri Geometry API for Java (com.esri.geometry:esri-geometry-api:2.2.0 - https://github.com/Esri/geometry-api-java) * ClassMate (com.fasterxml:classmate:1.3.0 - http://github.com/cowtowncoder/java-classmate) - * Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.12.3 - http://github.com/FasterXML/jackson) - * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.12.3 - https://github.com/FasterXML/jackson-core) - * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.12.3 - http://github.com/FasterXML/jackson) - * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.11.2 - http://github.com/FasterXML/jackson-dataformats-binary) - * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) - * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.10.3 - 
https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) - * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.10.3 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + * Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.13.4 - http://github.com/FasterXML/jackson) + * Jackson-core (com.fasterxml.jackson.core:jackson-core:2.13.4 - https://github.com/FasterXML/jackson-core) + * jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.2 - http://github.com/FasterXML/jackson) + * Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.12.6 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.13.3 - http://github.com/FasterXML/jackson-dataformats-binary) + * Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.1 - https://github.com/FasterXML/jackson-dataformats-text) + * Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) + * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.1 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + * Jackson-module-parameter-names (com.fasterxml.jackson.module:jackson-module-parameter-names:2.13.5 - https://github.com/FasterXML/jackson-modules-java8/jackson-module-parameter-names) + * Java UUID Generator (com.fasterxml.uuid:java-uuid-generator:4.0.1 - https://github.com/cowtowncoder/java-uuid-generator) + * Woodstox (com.fasterxml.woodstox:woodstox-core:6.2.4 - https://github.com/FasterXML/woodstox) * zjsonpatch 
(com.flipkart.zjsonpatch:zjsonpatch:0.4.6 - https://github.com/flipkart-incubator/zjsonpatch/) - * Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.8.4 - https://github.com/ben-manes/caffeine) - * Open JSON (com.github.openjson:openjson:1.0.12 - https://github.com/openjson/openjson) + * Caffeine cache (com.github.ben-manes.caffeine:caffeine:2.9.2 - https://github.com/ben-manes/caffeine) + * btf (com.github.java-json-tools:btf:1.3 - https://github.com/java-json-tools/btf) + * jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils) + * jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils) + * json-schema-core (com.github.java-json-tools:json-schema-core:1.2.14 - https://github.com/java-json-tools/json-schema-core) + * json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator) + * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) + * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * JCIP Annotations under Apache License (com.github.stephenc.jcip:jcip-annotations:1.0-1 - http://stephenc.github.com/jcip-annotations) * Google APIs Client Library for Java (com.google.api-client:google-api-client:1.23.0 - https://github.com/google/google-api-java-client/google-api-client) * Google Analytics API v3-rev145-1.23.0 (com.google.apis:google-api-services-analytics:v3-rev145-1.23.0 - http://nexus.sonatype.org/oss-repository-hosting.html/google-api-services-analytics) * FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.1 - http://findbugs.sourceforge.net/) - * Gson (com.google.code.gson:gson:2.8.6 - https://github.com/google/gson/gson) - * error-prone annotations (com.google.errorprone:error_prone_annotations:2.3.4 - 
http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) + * Gson (com.google.code.gson:gson:2.9.0 - https://github.com/google/gson/gson) + * error-prone annotations (com.google.errorprone:error_prone_annotations:2.18.0 - https://errorprone.info/error_prone_annotations) * Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) - * Guava: Google Core Libraries for Java (com.google.guava:guava:30.0-jre - https://github.com/google/guava/guava) + * Guava: Google Core Libraries for Java (com.google.guava:guava:32.0.0-jre - https://github.com/google/guava) * Guava: Google Core Libraries for Java (JDK5 Backport) (com.google.guava:guava-jdk5:17.0 - http://code.google.com/p/guava-libraries/guava-jdk5) * Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture) * Google HTTP Client Library for Java (com.google.http-client:google-http-client:1.23.0 - https://github.com/google/google-http-java-client/google-http-client) + * GSON extensions to the Google HTTP Client Library for Java. (com.google.http-client:google-http-client-gson:1.41.7 - https://github.com/googleapis/google-http-java-client/google-http-client-gson) * Jackson 2 extensions to the Google HTTP Client Library for Java. 
(com.google.http-client:google-http-client-jackson2:1.23.0 - https://github.com/google/google-http-java-client/google-http-client-jackson2) - * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) - * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.23.0 - https://github.com/google/google-oauth-java-client/google-oauth-client) + * J2ObjC Annotations (com.google.j2objc:j2objc-annotations:2.8 - https://github.com/google/j2objc/) + * Google OAuth Client Library for Java (com.google.oauth-client:google-oauth-client:1.33.3 - https://github.com/googleapis/google-oauth-java-client/google-oauth-client) * ConcurrentLinkedHashMap (com.googlecode.concurrentlinkedhashmap:concurrentlinkedhashmap-lru:1.4.2 - http://code.google.com/p/concurrentlinkedhashmap) - * JSON.simple (com.googlecode.json-simple:json-simple:1.1.1 - http://code.google.com/p/json-simple/) - * Jackcess (com.healthmarketscience.jackcess:jackcess:3.0.1 - https://jackcess.sourceforge.io) - * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:3.0.0 - http://jackcessencrypt.sf.net) - * project ':json-path' (com.jayway.jsonpath:json-path:2.4.0 - https://github.com/jayway/JsonPath) - * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.4.0 - https://github.com/jayway/JsonPath) + * libphonenumber (com.googlecode.libphonenumber:libphonenumber:8.11.1 - https://github.com/google/libphonenumber/) + * Jackcess (com.healthmarketscience.jackcess:jackcess:4.0.2 - https://jackcess.sourceforge.io) + * Jackcess Encrypt (com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 - http://jackcessencrypt.sf.net) + * project ':json-path' (com.jayway.jsonpath:json-path:2.6.0 - https://github.com/jayway/JsonPath) + * project ':json-path-assert' (com.jayway.jsonpath:json-path-assert:2.6.0 - https://github.com/jayway/JsonPath) * Disruptor Framework (com.lmax:disruptor:3.4.2 - http://lmax-exchange.github.com/disruptor) * 
builder-commons (com.lyncode:builder-commons:1.0.2 - http://nexus.sonatype.org/oss-repository-hosting.html/builder-commons) * MaxMind DB Reader (com.maxmind.db:maxmind-db:1.2.2 - http://dev.maxmind.com/) * MaxMind GeoIP2 API (com.maxmind.geoip2:geoip2:2.11.0 - http://dev.maxmind.com/geoip/geoip2/web-services) * Nimbus JOSE+JWT (com.nimbusds:nimbus-jose-jwt:7.9 - https://bitbucket.org/connect2id/nimbus-jose-jwt) - * opencsv (com.opencsv:opencsv:5.2 - http://opencsv.sf.net) + * opencsv (com.opencsv:opencsv:5.6 - http://opencsv.sf.net) * java-libpst (com.pff:java-libpst:0.9.3 - https://github.com/rjohnsondev/java-libpst) - * rome (com.rometools:rome:1.12.2 - http://rometools.com/rome) - * rome-utils (com.rometools:rome-utils:1.12.2 - http://rometools.com/rome-utils) + * rome (com.rometools:rome:1.19.0 - http://rometools.com/rome) + * rome-modules (com.rometools:rome-modules:1.19.0 - http://rometools.com/rome-modules) + * rome-utils (com.rometools:rome-utils:1.19.0 - http://rometools.com/rome-utils) * fastinfoset (com.sun.xml.fastinfoset:FastInfoset:1.2.15 - http://fi.java.net) * T-Digest (com.tdunning:t-digest:3.1 - https://github.com/tdunning/t-digest) + * config (com.typesafe:config:1.3.3 - https://github.com/lightbend/config) + * ssl-config-core (com.typesafe:ssl-config-core_2.13:0.3.8 - https://github.com/lightbend/ssl-config) + * akka-actor (com.typesafe.akka:akka-actor_2.13:2.5.31 - https://akka.io/) + * akka-http-core (com.typesafe.akka:akka-http-core_2.13:10.1.12 - https://akka.io) + * akka-http (com.typesafe.akka:akka-http_2.13:10.1.12 - https://akka.io) + * akka-parsing (com.typesafe.akka:akka-parsing_2.13:10.1.12 - https://akka.io) + * akka-protobuf (com.typesafe.akka:akka-protobuf_2.13:2.5.31 - https://akka.io/) + * akka-stream (com.typesafe.akka:akka-stream_2.13:2.5.31 - https://akka.io/) + * scala-logging (com.typesafe.scala-logging:scala-logging_2.13:3.9.2 - https://github.com/lightbend/scala-logging) * JSON library from Android SDK 
(com.vaadin.external.google:android-json:0.0.20131108.vaadin1 - http://developer.android.com/sdk) - * HikariCP (com.zaxxer:HikariCP-java7:2.4.13 - https://github.com/brettwooldridge/HikariCP) * SparseBitSet (com.zaxxer:SparseBitSet:1.2 - https://github.com/brettwooldridge/SparseBitSet) * Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/) * Apache Commons CLI (commons-cli:commons-cli:1.4 - http://commons.apache.org/proper/commons-cli/) * Apache Commons Codec (commons-codec:commons-codec:1.10 - http://commons.apache.org/proper/commons-codec/) * Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/collections/) * Commons Digester (commons-digester:commons-digester:1.8.1 - http://commons.apache.org/digester/) - * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.3 - http://commons.apache.org/proper/commons-fileupload/) + * Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.5 - https://commons.apache.org/proper/commons-fileupload/) * Apache Commons IO (commons-io:commons-io:2.7 - https://commons.apache.org/proper/commons-io/) * Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/) * Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/) * Apache Commons Validator (commons-validator:commons-validator:1.5.0 - http://commons.apache.org/proper/commons-validator/) - * Boilerpipe -- Boilerplate Removal and Fulltext Extraction from HTML pages (de.l3s.boilerpipe:boilerpipe:1.1.0 - http://code.google.com/p/boilerpipe/) + * GeoJson POJOs for Jackson (de.grundid.opendatalab:geojson-jackson:1.14 - https://github.com/opendatalab-de/geojson-jackson) + * OpenAIRE Funders Model (eu.openaire:funders-model:2.0.0 - https://api.openaire.eu) * Metrics Core (io.dropwizard.metrics:metrics-core:4.1.5 - https://metrics.dropwizard.io/metrics-core) * Graphite 
Integration for Metrics (io.dropwizard.metrics:metrics-graphite:4.1.5 - https://metrics.dropwizard.io/metrics-graphite) * Metrics Integration for Jetty 9.3 and higher (io.dropwizard.metrics:metrics-jetty9:4.1.5 - https://metrics.dropwizard.io/metrics-jetty9) * Metrics Integration with JMX (io.dropwizard.metrics:metrics-jmx:4.1.5 - https://metrics.dropwizard.io/metrics-jmx) * JVM Integration for Metrics (io.dropwizard.metrics:metrics-jvm:4.1.5 - https://metrics.dropwizard.io/metrics-jvm) - * Netty (io.netty:netty:3.10.6.Final - http://netty.io/) - * Netty/Buffer (io.netty:netty-buffer:4.1.50.Final - https://netty.io/netty-buffer/) - * Netty/Codec (io.netty:netty-codec:4.1.50.Final - https://netty.io/netty-codec/) - * Netty/Common (io.netty:netty-common:4.1.50.Final - https://netty.io/netty-common/) - * Netty/Handler (io.netty:netty-handler:4.1.50.Final - https://netty.io/netty-handler/) - * Netty/Resolver (io.netty:netty-resolver:4.1.50.Final - https://netty.io/netty-resolver/) - * Netty/Transport (io.netty:netty-transport:4.1.50.Final - https://netty.io/netty-transport/) - * Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.50.Final - https://netty.io/netty-transport-native-epoll/) - * Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.50.Final - https://netty.io/netty-transport-native-unix-common/) + * micrometer-core (io.micrometer:micrometer-core:1.9.11 - https://github.com/micrometer-metrics/micrometer) + * Netty/Buffer (io.netty:netty-buffer:4.1.68.Final - https://netty.io/netty-buffer/) + * Netty/Codec (io.netty:netty-codec:4.1.68.Final - https://netty.io/netty-codec/) + * Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.53.Final - https://netty.io/netty-codec-http/) + * Netty/Codec/Socks (io.netty:netty-codec-socks:4.1.53.Final - https://netty.io/netty-codec-socks/) + * Netty/Common (io.netty:netty-common:4.1.68.Final - https://netty.io/netty-common/) + * Netty/Handler (io.netty:netty-handler:4.1.68.Final 
- https://netty.io/netty-handler/) + * Netty/Handler/Proxy (io.netty:netty-handler-proxy:4.1.53.Final - https://netty.io/netty-handler-proxy/) + * Netty/Resolver (io.netty:netty-resolver:4.1.68.Final - https://netty.io/netty-resolver/) + * Netty/Transport (io.netty:netty-transport:4.1.68.Final - https://netty.io/netty-transport/) + * Netty/Transport/Native/Epoll (io.netty:netty-transport-native-epoll:4.1.68.Final - https://netty.io/netty-transport-native-epoll/) + * Netty/Transport/Native/Unix/Common (io.netty:netty-transport-native-unix-common:4.1.68.Final - https://netty.io/netty-transport-native-unix-common/) * OpenTracing API (io.opentracing:opentracing-api:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-api) * OpenTracing-noop (io.opentracing:opentracing-noop:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-noop) * OpenTracing-util (io.opentracing:opentracing-util:0.33.0 - https://github.com/opentracing/opentracing-java/opentracing-util) * Google S2 geometry library (io.sgr:s2-geometry-library-java:1.0.0 - https://github.com/sgr-io/s2-geometry-library-java) + * swagger-annotations (io.swagger:swagger-annotations:1.6.2 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations) + * swagger-compat-spec-parser (io.swagger:swagger-compat-spec-parser:1.0.52 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-compat-spec-parser) + * swagger-core (io.swagger:swagger-core:1.6.2 - https://github.com/swagger-api/swagger-core/modules/swagger-core) + * swagger-models (io.swagger:swagger-models:1.6.2 - https://github.com/swagger-api/swagger-core/modules/swagger-models) + * swagger-parser (io.swagger:swagger-parser:1.0.52 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser) + * swagger-annotations (io.swagger.core.v3:swagger-annotations:2.1.5 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations) + * 
swagger-core (io.swagger.core.v3:swagger-core:2.1.5 - https://github.com/swagger-api/swagger-core/modules/swagger-core) + * swagger-models (io.swagger.core.v3:swagger-models:2.1.5 - https://github.com/swagger-api/swagger-core/modules/swagger-models) + * swagger-parser (io.swagger.parser.v3:swagger-parser:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser) + * swagger-parser (io.swagger.parser.v3:swagger-parser-core:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-core) + * swagger-parser-v2-converter (io.swagger.parser.v3:swagger-parser-v2-converter:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-v2-converter) + * swagger-parser-v3 (io.swagger.parser.v3:swagger-parser-v3:2.0.23 - http://nexus.sonatype.org/oss-repository-hosting.html/swagger-parser-project/modules/swagger-parser-v3) * Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:2.0.2 - https://beanvalidation.org) + * JSR107 API and SPI (javax.cache:cache-api:1.1.0 - https://github.com/jsr107/jsr107spec) * javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/) * Bean Validation API (javax.validation:validation-api:2.0.1.Final - http://beanvalidation.org) * jdbm (jdbm:jdbm:1.0 - no url defined) * Joda-Time (joda-time:joda-time:2.9.2 - http://www.joda.org/joda-time/) - * Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.2 - https://bytebuddy.net/byte-buddy) - * Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.20 - https://bytebuddy.net/byte-buddy) - * Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.10.20 - https://bytebuddy.net/byte-buddy-agent) + * Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.11.13 - https://bytebuddy.net/byte-buddy) + * Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.11.13 - https://bytebuddy.net/byte-buddy-agent) * 
eigenbase-properties (net.hydromatic:eigenbase-properties:1.1.5 - http://github.com/julianhyde/eigenbase-properties) + * json-unit-core (net.javacrumbs.json-unit:json-unit-core:2.19.0 - https://github.com/lukas-krecan/JsonUnit/json-unit-core) * "Java Concurrency in Practice" book annotations (net.jcip:jcip-annotations:1.0 - http://jcip.net/) * ASM based accessors helper used by json-smart (net.minidev:accessors-smart:1.2 - http://www.minidev.net/) + * ASM based accessors helper used by json-smart (net.minidev:accessors-smart:2.4.7 - https://urielch.github.io/) * JSON Small and Fast Parser (net.minidev:json-smart:2.3 - http://www.minidev.net/) - * ehcache (net.sf.ehcache:ehcache:2.10.6 - http://ehcache.org) - * Ehcache Core (net.sf.ehcache:ehcache-core:2.6.11 - http://ehcache.org) + * JSON Small and Fast Parser (net.minidev:json-smart:2.4.7 - https://urielch.github.io/) * Abdera Core (org.apache.abdera:abdera-core:1.1.3 - http://abdera.apache.org/abdera-core) * I18N Libraries (org.apache.abdera:abdera-i18n:1.1.3 - http://abdera.apache.org) - * Apache Ant Core (org.apache.ant:ant:1.10.9 - https://ant.apache.org/) - * Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.9 - https://ant.apache.org/) - * Apache Commons BCEL (org.apache.bcel:bcel:6.4.0 - https://commons.apache.org/proper/commons-bcel) - * Calcite Core (org.apache.calcite:calcite-core:1.18.0 - https://calcite.apache.org/calcite-core) - * Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.18.0 - https://calcite.apache.org/calcite-linq4j) - * Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.13.0 - https://calcite.apache.org/avatica/avatica-core) + * Apache Ant Core (org.apache.ant:ant:1.10.11 - https://ant.apache.org/) + * Apache Ant Launcher (org.apache.ant:ant-launcher:1.10.11 - https://ant.apache.org/) + * Apache Commons BCEL (org.apache.bcel:bcel:6.6.0 - https://commons.apache.org/proper/commons-bcel) + * Calcite Core (org.apache.calcite:calcite-core:1.27.0 - 
https://calcite.apache.org) + * Calcite Linq4j (org.apache.calcite:calcite-linq4j:1.27.0 - https://calcite.apache.org) + * Apache Calcite Avatica (org.apache.calcite.avatica:avatica-core:1.18.0 - https://calcite.apache.org/avatica) * Apache Commons Collections (org.apache.commons:commons-collections4:4.1 - http://commons.apache.org/proper/commons-collections/) - * Apache Commons Compress (org.apache.commons:commons-compress:1.20 - https://commons.apache.org/proper/commons-compress/) - * Apache Commons Configuration (org.apache.commons:commons-configuration2:2.7 - https://commons.apache.org/proper/commons-configuration/) - * Apache Commons CSV (org.apache.commons:commons-csv:1.8 - https://commons.apache.org/proper/commons-csv/) - * Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.8.0 - https://commons.apache.org/dbcp/) + * Apache Commons Compress (org.apache.commons:commons-compress:1.21 - https://commons.apache.org/proper/commons-compress/) + * Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - https://commons.apache.org/proper/commons-configuration/) + * Apache Commons CSV (org.apache.commons:commons-csv:1.9.0 - https://commons.apache.org/proper/commons-csv/) + * Apache Commons DBCP (org.apache.commons:commons-dbcp2:2.9.0 - https://commons.apache.org/dbcp/) * Apache Commons Exec (org.apache.commons:commons-exec:1.3 - http://commons.apache.org/proper/commons-exec/) - * Apache Commons Lang (org.apache.commons:commons-lang3:3.7 - http://commons.apache.org/proper/commons-lang/) + * Apache Commons Lang (org.apache.commons:commons-lang3:3.12.0 - https://commons.apache.org/proper/commons-lang/) * Apache Commons Math (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/) - * Apache Commons Pool (org.apache.commons:commons-pool2:2.9.0 - https://commons.apache.org/proper/commons-pool/) - * Apache Commons Text (org.apache.commons:commons-text:1.8 - https://commons.apache.org/proper/commons-text) + * 
Apache Commons Pool (org.apache.commons:commons-pool2:2.11.1 - https://commons.apache.org/proper/commons-pool/) + * Apache Commons Text (org.apache.commons:commons-text:1.10.0 - https://commons.apache.org/proper/commons-text) * Curator Client (org.apache.curator:curator-client:2.13.0 - http://curator.apache.org/curator-client) * Curator Framework (org.apache.curator:curator-framework:2.13.0 - http://curator.apache.org/curator-framework) * Curator Recipes (org.apache.curator:curator-recipes:2.13.0 - http://curator.apache.org/curator-recipes) - * Apache CXF Core (org.apache.cxf:cxf-core:3.3.6 - https://cxf.apache.org) - * Apache CXF Runtime JAX-RS Frontend (org.apache.cxf:cxf-rt-frontend-jaxrs:3.3.6 - https://cxf.apache.org) - * Apache CXF JAX-RS Client (org.apache.cxf:cxf-rt-rs-client:3.3.6 - https://cxf.apache.org) - * Apache CXF Runtime Security functionality (org.apache.cxf:cxf-rt-security:3.3.6 - https://cxf.apache.org) - * Apache CXF Runtime HTTP Transport (org.apache.cxf:cxf-rt-transports-http:3.3.6 - https://cxf.apache.org) - * JTA 1.1 (org.apache.geronimo.specs:geronimo-jta_1.1_spec:1.1.1 - http://geronimo.apache.org/specs/geronimo-jta_1.1_spec) - * Web Services Metadata 2.0 (org.apache.geronimo.specs:geronimo-ws-metadata_2.0_spec:1.1.3 - http://geronimo.apache.org/maven/specs/geronimo-ws-metadata_2.0_spec/1.1.3) - * Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.0 - no url defined) - * Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.0 - no url defined) - * Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.0 - no url defined) - * Apache Hadoop HDFS Client (org.apache.hadoop:hadoop-hdfs-client:3.2.0 - no url defined) + * Apache Hadoop Annotations (org.apache.hadoop:hadoop-annotations:3.2.2 - no url defined) + * Apache Hadoop Auth (org.apache.hadoop:hadoop-auth:3.2.2 - no url defined) + * Apache Hadoop Common (org.apache.hadoop:hadoop-common:3.2.2 - no url defined) + * Apache Hadoop HDFS Client 
(org.apache.hadoop:hadoop-hdfs-client:3.2.2 - no url defined) * htrace-core4 (org.apache.htrace:htrace-core4:4.1.0-incubating - http://incubator.apache.org/projects/htrace.html) * Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.13 - http://hc.apache.org/httpcomponents-client) * Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.2.6 - http://hc.apache.org/httpcomponents-client) - * Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.4 - http://hc.apache.org/httpcomponents-core-ga) - * Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.12 - http://hc.apache.org/httpcomponents-client) - * Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-core) - * Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.3 - http://james.apache.org/mime4j/apache-mime4j-dom) + * Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.15 - http://hc.apache.org/httpcomponents-core-ga) + * Apache HttpClient Mime (org.apache.httpcomponents:httpmime:4.5.13 - http://hc.apache.org/httpcomponents-client) + * Apache James :: Mime4j :: Core (org.apache.james:apache-mime4j-core:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-core) + * Apache James :: Mime4j :: DOM (org.apache.james:apache-mime4j-dom:0.8.4 - http://james.apache.org/mime4j/apache-mime4j-dom) * Apache Jena - Libraries POM (org.apache.jena:apache-jena-libs:2.13.0 - http://jena.apache.org/apache-jena-libs/) * Apache Jena - ARQ (SPARQL 1.1 Query Engine) (org.apache.jena:jena-arq:2.13.0 - http://jena.apache.org/jena-arq/) * Apache Jena - Core (org.apache.jena:jena-core:2.13.0 - http://jena.apache.org/jena-core/) @@ -192,263 +196,278 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines * Kerby-kerb Util (org.apache.kerby:kerb-util:1.0.1 - http://directory.apache.org/kerby/kerby-kerb/kerb-util) * Kerby ASN1 Project (org.apache.kerby:kerby-asn1:1.0.1 - 
http://directory.apache.org/kerby/kerby-common/kerby-asn1) * Kerby PKIX Project (org.apache.kerby:kerby-pkix:1.0.1 - http://directory.apache.org/kerby/kerby-pkix) - * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) - * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-api/) - * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-core/) - * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-jul/) - * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) - * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.13.3 - https://logging.apache.org/log4j/2.x/log4j-web/) - * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) - * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) - * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) - * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) - * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) - * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) - * Lucene Stempel Analyzer (org.apache.lucene:lucene-analyzers-stempel:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) - * Lucene Memory 
(org.apache.lucene:lucene-backward-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) - * Lucene Classification (org.apache.lucene:lucene-classification:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-classification) - * Lucene codecs (org.apache.lucene:lucene-codecs:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-codecs) - * Lucene Core (org.apache.lucene:lucene-core:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-core) - * Lucene Expressions (org.apache.lucene:lucene-expressions:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-expressions) - * Lucene Grouping (org.apache.lucene:lucene-grouping:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-grouping) - * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-highlighter) - * Lucene Join (org.apache.lucene:lucene-join:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-join) - * Lucene Memory (org.apache.lucene:lucene-memory:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-memory) - * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-misc) - * Lucene Queries (org.apache.lucene:lucene-queries:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queries) - * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-queryparser) - * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-sandbox) - * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) - * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) - * Lucene Suggest (org.apache.lucene:lucene-suggest:8.8.1 - https://lucene.apache.org/lucene-parent/lucene-suggest) - * Apache OpenNLP Tools (org.apache.opennlp:opennlp-tools:1.9.2 
- https://www.apache.org/opennlp/opennlp-tools/) - * Apache FontBox (org.apache.pdfbox:fontbox:2.0.24 - http://pdfbox.apache.org/) - * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.3 - https://www.apache.org/jbig2-imageio/) - * Apache JempBox (org.apache.pdfbox:jempbox:1.8.16 - http://www.apache.org/pdfbox-parent/jempbox/) - * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.24 - https://www.apache.org/pdfbox-parent/pdfbox/) - * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.19 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) - * Apache Preflight (org.apache.pdfbox:preflight:2.0.19 - https://www.apache.org/pdfbox-parent/preflight/) - * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.19 - https://www.apache.org/pdfbox-parent/xmpbox/) - * Apache POI (org.apache.poi:poi:3.17 - http://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml:3.17 - http://poi.apache.org/) - * Apache POI (org.apache.poi:poi-ooxml-schemas:3.17 - http://poi.apache.org/) - * Apache POI (org.apache.poi:poi-scratchpad:3.17 - http://poi.apache.org/) - * Apache SIS features (org.apache.sis.core:sis-feature:1.0 - http://sis.apache.org/core/sis-feature) - * Apache SIS metadata (org.apache.sis.core:sis-metadata:1.0 - http://sis.apache.org/core/sis-metadata) - * Apache SIS referencing (org.apache.sis.core:sis-referencing:1.0 - http://sis.apache.org/core/sis-referencing) - * Apache SIS utilities (org.apache.sis.core:sis-utility:1.0 - http://sis.apache.org/core/sis-utility) - * Apache SIS netCDF storage (org.apache.sis.storage:sis-netcdf:1.0 - http://sis.apache.org/storage/sis-netcdf) - * Apache SIS common storage (org.apache.sis.storage:sis-storage:1.0 - http://sis.apache.org/storage/sis-storage) - * Apache Solr Content Extraction Library (org.apache.solr:solr-cell:8.8.1 - https://lucene.apache.org/solr-parent/solr-cell) - * Apache Solr Core (org.apache.solr:solr-core:8.8.1 - https://lucene.apache.org/solr-parent/solr-core) - * Apache Solr Solrj 
(org.apache.solr:solr-solrj:8.8.1 - https://lucene.apache.org/solr-parent/solr-solrj) + * Apache Log4j 1.x Compatibility API (org.apache.logging.log4j:log4j-1.2-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-1.2-api/) + * Apache Log4j API (org.apache.logging.log4j:log4j-api:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-api/) + * Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-core/) + * Apache Log4j JUL Adapter (org.apache.logging.log4j:log4j-jul:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-jul/) + * Apache Log4j Layout for JSON template (org.apache.logging.log4j:log4j-layout-template-json:2.17.1 - https://logging.apache.org/log4j/2.x/log4j-layout-template-json/) + * Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/) + * Apache Log4j Web (org.apache.logging.log4j:log4j-web:2.20.0 - https://logging.apache.org/log4j/2.x/log4j-web/) + * Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-common) + * Lucene ICU Analysis Components (org.apache.lucene:lucene-analyzers-icu:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-icu) + * Lucene Kuromoji Japanese Morphological Analyzer (org.apache.lucene:lucene-analyzers-kuromoji:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-kuromoji) + * Lucene Nori Korean Morphological Analyzer (org.apache.lucene:lucene-analyzers-nori:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-nori) + * Lucene Phonetic Filters (org.apache.lucene:lucene-analyzers-phonetic:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-phonetic) + * Lucene Smart Chinese Analyzer (org.apache.lucene:lucene-analyzers-smartcn:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-smartcn) + * Lucene Stempel Analyzer 
(org.apache.lucene:lucene-analyzers-stempel:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-analyzers-stempel) + * Lucene Memory (org.apache.lucene:lucene-backward-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-backward-codecs) + * Lucene Classification (org.apache.lucene:lucene-classification:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-classification) + * Lucene codecs (org.apache.lucene:lucene-codecs:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-codecs) + * Lucene Core (org.apache.lucene:lucene-core:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-core) + * Lucene Expressions (org.apache.lucene:lucene-expressions:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-expressions) + * Lucene Grouping (org.apache.lucene:lucene-grouping:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-grouping) + * Lucene Highlighter (org.apache.lucene:lucene-highlighter:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-highlighter) + * Lucene Join (org.apache.lucene:lucene-join:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-join) + * Lucene Memory (org.apache.lucene:lucene-memory:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-memory) + * Lucene Miscellaneous (org.apache.lucene:lucene-misc:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-misc) + * Lucene Queries (org.apache.lucene:lucene-queries:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queries) + * Lucene QueryParsers (org.apache.lucene:lucene-queryparser:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-queryparser) + * Lucene Sandbox (org.apache.lucene:lucene-sandbox:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-sandbox) + * Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial-extras) + * Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-spatial3d) + * Lucene Suggest 
(org.apache.lucene:lucene-suggest:8.11.2 - https://lucene.apache.org/lucene-parent/lucene-suggest) + * Apache FontBox (org.apache.pdfbox:fontbox:2.0.28 - http://pdfbox.apache.org/) + * PDFBox JBIG2 ImageIO plugin (org.apache.pdfbox:jbig2-imageio:3.0.4 - https://www.apache.org/jbig2-imageio/) + * Apache JempBox (org.apache.pdfbox:jempbox:1.8.17 - http://www.apache.org/pdfbox-parent/jempbox/) + * Apache PDFBox (org.apache.pdfbox:pdfbox:2.0.28 - https://www.apache.org/pdfbox-parent/pdfbox/) + * Apache PDFBox tools (org.apache.pdfbox:pdfbox-tools:2.0.27 - https://www.apache.org/pdfbox-parent/pdfbox-tools/) + * Apache XmpBox (org.apache.pdfbox:xmpbox:2.0.27 - https://www.apache.org/pdfbox-parent/xmpbox/) + * Apache POI - Common (org.apache.poi:poi:5.2.3 - https://poi.apache.org/) + * Apache POI - API based on OPC and OOXML schemas (org.apache.poi:poi-ooxml:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-ooxml-lite:5.2.3 - https://poi.apache.org/) + * Apache POI (org.apache.poi:poi-scratchpad:5.2.3 - https://poi.apache.org/) + * Apache Solr Core (org.apache.solr:solr-core:8.11.2 - https://lucene.apache.org/solr-parent/solr-core) + * Apache Solr Solrj (org.apache.solr:solr-solrj:8.11.2 - https://lucene.apache.org/solr-parent/solr-solrj) * Apache Standard Taglib Implementation (org.apache.taglibs:taglibs-standard-impl:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-impl) * Apache Standard Taglib Specification API (org.apache.taglibs:taglibs-standard-spec:1.2.5 - http://tomcat.apache.org/taglibs/standard-1.2.5/taglibs-standard-spec) * Apache Thrift (org.apache.thrift:libthrift:0.9.2 - http://thrift.apache.org) - * Apache Tika core (org.apache.tika:tika-core:1.24.1 - http://tika.apache.org/) - * Apache Tika Java-7 Components (org.apache.tika:tika-java7:1.24.1 - http://tika.apache.org/) - * Apache Tika parsers (org.apache.tika:tika-parsers:1.24.1 - http://tika.apache.org/) - * Apache Tika XMP (org.apache.tika:tika-xmp:1.24.1 - 
http://tika.apache.org/) - * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.33 - https://tomcat.apache.org/) - * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.33 - https://tomcat.apache.org/) - * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.33 - https://tomcat.apache.org/) - * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.0 - http://velocity.apache.org/engine/devel/velocity-engine-core/) + * Apache Tika core (org.apache.tika:tika-core:2.5.0 - https://tika.apache.org/) + * Apache Tika Apple parser module (org.apache.tika:tika-parser-apple-module:2.5.0 - https://tika.apache.org/tika-parser-apple-module/) + * Apache Tika audiovideo parser module (org.apache.tika:tika-parser-audiovideo-module:2.5.0 - https://tika.apache.org/tika-parser-audiovideo-module/) + * Apache Tika cad parser module (org.apache.tika:tika-parser-cad-module:2.5.0 - https://tika.apache.org/tika-parser-cad-module/) + * Apache Tika code parser module (org.apache.tika:tika-parser-code-module:2.5.0 - https://tika.apache.org/tika-parser-code-module/) + * Apache Tika crypto parser module (org.apache.tika:tika-parser-crypto-module:2.5.0 - https://tika.apache.org/tika-parser-crypto-module/) + * Apache Tika digest commons (org.apache.tika:tika-parser-digest-commons:2.5.0 - https://tika.apache.org/tika-parser-digest-commons/) + * Apache Tika font parser module (org.apache.tika:tika-parser-font-module:2.5.0 - https://tika.apache.org/tika-parser-font-module/) + * Apache Tika html parser module (org.apache.tika:tika-parser-html-module:2.5.0 - https://tika.apache.org/tika-parser-html-module/) + * Apache Tika image parser module (org.apache.tika:tika-parser-image-module:2.5.0 - https://tika.apache.org/tika-parser-image-module/) + * Apache Tika mail commons (org.apache.tika:tika-parser-mail-commons:2.5.0 - https://tika.apache.org/tika-parser-mail-commons/) + * Apache Tika mail parser module 
(org.apache.tika:tika-parser-mail-module:2.5.0 - https://tika.apache.org/tika-parser-mail-module/) + * Apache Tika Microsoft parser module (org.apache.tika:tika-parser-microsoft-module:2.5.0 - https://tika.apache.org/tika-parser-microsoft-module/) + * Apache Tika miscellaneous office format parser module (org.apache.tika:tika-parser-miscoffice-module:2.5.0 - https://tika.apache.org/tika-parser-miscoffice-module/) + * Apache Tika news parser module (org.apache.tika:tika-parser-news-module:2.5.0 - https://tika.apache.org/tika-parser-news-module/) + * Apache Tika OCR parser module (org.apache.tika:tika-parser-ocr-module:2.5.0 - https://tika.apache.org/tika-parser-ocr-module/) + * Apache Tika PDF parser module (org.apache.tika:tika-parser-pdf-module:2.5.0 - https://tika.apache.org/tika-parser-pdf-module/) + * Apache Tika package parser module (org.apache.tika:tika-parser-pkg-module:2.5.0 - https://tika.apache.org/tika-parser-pkg-module/) + * Apache Tika text parser module (org.apache.tika:tika-parser-text-module:2.5.0 - https://tika.apache.org/tika-parser-text-module/) + * Apache Tika WARC parser module (org.apache.tika:tika-parser-webarchive-module:2.5.0 - https://tika.apache.org/tika-parser-webarchive-module/) + * Apache Tika XML parser module (org.apache.tika:tika-parser-xml-module:2.5.0 - https://tika.apache.org/tika-parser-xml-module/) + * Apache Tika XMP commons (org.apache.tika:tika-parser-xmp-commons:2.5.0 - https://tika.apache.org/tika-parser-xmp-commons/) + * Apache Tika ZIP commons (org.apache.tika:tika-parser-zip-commons:2.5.0 - https://tika.apache.org/tika-parser-zip-commons/) + * Apache Tika standard parser package (org.apache.tika:tika-parsers-standard-package:2.5.0 - https://tika.apache.org/tika-parsers/tika-parsers-standard/tika-parsers-standard-package/) + * tomcat-embed-core (org.apache.tomcat.embed:tomcat-embed-core:9.0.75 - https://tomcat.apache.org/) + * tomcat-embed-el (org.apache.tomcat.embed:tomcat-embed-el:9.0.75 - https://tomcat.apache.org/) 
+ * tomcat-embed-websocket (org.apache.tomcat.embed:tomcat-embed-websocket:9.0.75 - https://tomcat.apache.org/) + * Apache Velocity - Engine (org.apache.velocity:velocity-engine-core:2.3 - http://velocity.apache.org/engine/devel/velocity-engine-core/) + * Apache Velocity - JSR 223 Scripting (org.apache.velocity:velocity-engine-scripting:2.2 - http://velocity.apache.org/engine/devel/velocity-engine-scripting/) * Axiom API (org.apache.ws.commons.axiom:axiom-api:1.2.22 - http://ws.apache.org/axiom/) - * LLOM (org.apache.ws.commons.axiom:axiom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/axiom-impl/) * Abdera Model (FOM) Implementation (org.apache.ws.commons.axiom:fom-impl:1.2.22 - http://ws.apache.org/axiom/implementations/fom-impl/) - * XmlSchema Core (org.apache.ws.xmlschema:xmlschema-core:2.2.5 - https://ws.apache.org/commons/xmlschema20/xmlschema-core/) - * XmlBeans (org.apache.xmlbeans:xmlbeans:3.1.0 - https://xmlbeans.apache.org/) - * zookeeper (org.apache.zookeeper:zookeeper:3.4.14 - no url defined) + * XmlBeans (org.apache.xmlbeans:xmlbeans:5.1.1 - https://xmlbeans.apache.org/) + * Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.6.2 - http://zookeeper.apache.org/zookeeper) * Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.6.2 - http://zookeeper.apache.org/zookeeper-jute) - * AssertJ fluent assertions (org.assertj:assertj-core:3.13.2 - http://assertj.org/assertj-core) - * Evo Inflector (org.atteo:evo-inflector:1.2.2 - http://atteo.org/static/evo-inflector) + * org.apiguardian:apiguardian-api (org.apiguardian:apiguardian-api:1.1.0 - https://github.com/apiguardian-team/apiguardian) + * AssertJ fluent assertions (org.assertj:assertj-core:3.22.0 - https://assertj.github.io/doc/assertj-core/) + * Evo Inflector (org.atteo:evo-inflector:1.3 - http://atteo.org/static/evo-inflector) * jose4j (org.bitbucket.b_c:jose4j:0.6.5 - https://bitbucket.org/b_c/jose4j/) * TagSoup (org.ccil.cowan.tagsoup:tagsoup:1.2.1 - 
http://home.ccil.org/~cowan/XML/tagsoup/) - * Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.4.1 - http://woodstox.codehaus.org) + * jems (org.dmfs:jems:1.18 - https://github.com/dmfs/jems) + * rfc3986-uri (org.dmfs:rfc3986-uri:0.8.1 - https://github.com/dmfs/uri-toolkit) * Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - http://tomcat.apache.org/taglibs/standard/) - * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) - * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) + * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations 
(org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io) - * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx) + * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) + * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite) - * Jetty :: Security 
(org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml) - * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common) - * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack) - * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - 
https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) + * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) - * flyway-core (org.flywaydb:flyway-core:6.5.5 - https://flywaydb.org/flyway-core) + * Ehcache (org.ehcache:ehcache:3.4.0 - http://ehcache.org) + * flyway-core (org.flywaydb:flyway-core:8.4.4 - https://flywaydb.org/flyway-core) * Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.8 - https://github.com/Gagravarr/VorbisJava) * Apache Tika plugin for 
Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.8 - https://github.com/Gagravarr/VorbisJava) - * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.0.18.Final - http://hibernate.org/validator/hibernate-validator) - * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.0.18.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Hibernate Validator Engine (org.hibernate.validator:hibernate-validator:6.2.5.Final - http://hibernate.org/validator/hibernate-validator) + * Hibernate Validator Portable Extension (org.hibernate.validator:hibernate-validator-cdi:6.2.5.Final - http://hibernate.org/validator/hibernate-validator-cdi) + * leveldb (org.iq80.leveldb:leveldb:0.12 - http://github.com/dain/leveldb/leveldb) + * leveldb-api (org.iq80.leveldb:leveldb-api:0.12 - http://github.com/dain/leveldb/leveldb-api) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) - * Java Annotation Indexer (org.jboss:jandex:2.1.1.Final - http://www.jboss.org/jandex) + * Java Annotation Indexer (org.jboss:jandex:2.4.2.Final - http://www.jboss.org/jandex) + * JBoss Logging 3 (org.jboss.logging:jboss-logging:3.4.3.Final - http://www.jboss.org) + * JDOM (org.jdom:jdom2:2.0.6.1 - http://www.jdom.org) * jtwig-core (org.jtwig:jtwig-core:5.87.0.RELEASE - http://jtwig.org) * jtwig-reflection (org.jtwig:jtwig-reflection:5.87.0.RELEASE - http://jtwig.org) * jtwig-spring (org.jtwig:jtwig-spring:5.87.0.RELEASE - http://jtwig.org) * jtwig-spring-boot-starter 
(org.jtwig:jtwig-spring-boot-starter:5.87.0.RELEASE - http://jtwig.org) * jtwig-web (org.jtwig:jtwig-web:5.87.0.RELEASE - http://jtwig.org) * Spatial4J (org.locationtech.spatial4j:spatial4j:0.7 - https://projects.eclipse.org/projects/locationtech.spatial4j) + * MockServer Java Client (org.mock-server:mockserver-client-java:5.11.2 - http://www.mock-server.com) + * MockServer Core (org.mock-server:mockserver-core:5.11.2 - http://www.mock-server.com) + * MockServer JUnit 4 Integration (org.mock-server:mockserver-junit-rule:5.11.2 - http://www.mock-server.com) + * MockServer & Proxy Netty (org.mock-server:mockserver-netty:5.11.2 - http://www.mock-server.com) * MortBay :: Apache EL :: API and Implementation (org.mortbay.jasper:apache-el:8.5.35.1 - https://github.com/jetty-project/jasper-jsp/apache-el) * MortBay :: Apache Jasper :: JSP Implementation (org.mortbay.jasper:apache-jsp:8.5.35.1 - https://github.com/jetty-project/jasper-jsp/apache-jsp) * Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty) * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) * Servlet Specification API (org.mortbay.jetty:servlet-api:2.5-20081211 - http://jetty.mortbay.org/servlet-api) - * Objenesis (org.objenesis:objenesis:3.1 - http://objenesis.org) + * jwarc (org.netpreserve:jwarc:0.19.0 - https://github.com/iipc/jwarc) + * Objenesis (org.objenesis:objenesis:3.2 - http://objenesis.org/objenesis) * parboiled-core (org.parboiled:parboiled-core:1.3.1 - http://parboiled.org) * parboiled-java (org.parboiled:parboiled-java:1.3.1 - http://parboiled.org) - * quartz (org.quartz-scheduler:quartz:2.3.2 - http://www.quartz-scheduler.org/quartz) - * rome-modules (org.rometools:rome-modules:1.0 - http://www.rometools.org) * RRD4J 
(org.rrd4j:rrd4j:3.5 - https://github.com/rrd4j/rrd4j/) - * JSONassert (org.skyscreamer:jsonassert:1.5.0 - https://github.com/skyscreamer/JSONassert) - * Spring AOP (org.springframework:spring-aop:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Beans (org.springframework:spring-beans:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Context (org.springframework:spring-context:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Context Support (org.springframework:spring-context-support:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Core (org.springframework:spring-core:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring JDBC (org.springframework:spring-jdbc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Object/Relational Mapping (org.springframework:spring-orm:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring TestContext Framework (org.springframework:spring-test:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Transaction (org.springframework:spring-tx:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Web (org.springframework:spring-web:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Web MVC (org.springframework:spring-webmvc:5.2.5.RELEASE - https://github.com/spring-projects/spring-framework) - * Spring Boot (org.springframework.boot:spring-boot:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot) - * Spring Boot AutoConfigure 
(org.springframework.boot:spring-boot-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-autoconfigure) + * Scala Library (org.scala-lang:scala-library:2.13.9 - https://www.scala-lang.org/) + * Scala Compiler (org.scala-lang:scala-reflect:2.13.0 - https://www.scala-lang.org/) + * scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.13:2.1.6 - http://www.scala-lang.org/) + * scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.13:0.9.0 - http://www.scala-lang.org/) + * scala-parser-combinators (org.scala-lang.modules:scala-parser-combinators_2.13:1.1.2 - http://www.scala-lang.org/) + * scala-xml (org.scala-lang.modules:scala-xml_2.13:1.3.0 - http://www.scala-lang.org/) + * JSONassert (org.skyscreamer:jsonassert:1.5.1 - https://github.com/skyscreamer/JSONassert) + * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.36 - http://www.slf4j.org) + * Spring AOP (org.springframework:spring-aop:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Beans (org.springframework:spring-beans:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context (org.springframework:spring-context:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Context Support (org.springframework:spring-context-support:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Core (org.springframework:spring-core:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Expression Language (SpEL) (org.springframework:spring-expression:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Commons Logging Bridge (org.springframework:spring-jcl:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring JDBC (org.springframework:spring-jdbc:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Object/Relational Mapping (org.springframework:spring-orm:5.3.27 - 
https://github.com/spring-projects/spring-framework) + * Spring TestContext Framework (org.springframework:spring-test:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Transaction (org.springframework:spring-tx:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web (org.springframework:spring-web:5.3.27 - https://github.com/spring-projects/spring-framework) + * Spring Web MVC (org.springframework:spring-webmvc:5.3.27 - https://github.com/spring-projects/spring-framework) + * spring-boot (org.springframework.boot:spring-boot:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator (org.springframework.boot:spring-boot-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-actuator-autoconfigure (org.springframework.boot:spring-boot-actuator-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) * Spring Boot Configuration Processor (org.springframework.boot:spring-boot-configuration-processor:2.0.0.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-tools/spring-boot-configuration-processor) - * Spring Boot Starter (org.springframework.boot:spring-boot-starter:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter) - * Spring Boot AOP Starter (org.springframework.boot:spring-boot-starter-aop:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-aop) - * Spring Boot Data REST Starter (org.springframework.boot:spring-boot-starter-data-rest:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-data-rest) - * Spring Boot Json Starter (org.springframework.boot:spring-boot-starter-json:2.2.6.RELEASE - 
https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-json) - * Spring Boot Log4j 2 Starter (org.springframework.boot:spring-boot-starter-log4j2:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-log4j2) - * Spring Boot Security Starter (org.springframework.boot:spring-boot-starter-security:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-security) - * Spring Boot Test Starter (org.springframework.boot:spring-boot-starter-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-test) - * Spring Boot Tomcat Starter (org.springframework.boot:spring-boot-starter-tomcat:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-tomcat) - * Spring Boot Validation Starter (org.springframework.boot:spring-boot-starter-validation:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-validation) - * Spring Boot Web Starter (org.springframework.boot:spring-boot-starter-web:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-starters/spring-boot-starter-web) - * Spring Boot Test (org.springframework.boot:spring-boot-test:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test) - * Spring Boot Test Auto-Configure (org.springframework.boot:spring-boot-test-autoconfigure:2.2.6.RELEASE - https://projects.spring.io/spring-boot/#/spring-boot-parent/spring-boot-test-autoconfigure) - * Spring Data Core (org.springframework.data:spring-data-commons:2.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-commons) - * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.2.6.RELEASE - 
https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) - * Spring Data REST - HAL Browser (org.springframework.data:spring-data-rest-hal-browser:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-hal-browser) - * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.2.6.RELEASE - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) - * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.0.4.RELEASE - https://github.com/spring-projects/spring-hateoas) + * spring-boot-starter (org.springframework.boot:spring-boot-starter:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-actuator (org.springframework.boot:spring-boot-starter-actuator:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-cache (org.springframework.boot:spring-boot-starter-cache:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-data-rest (org.springframework.boot:spring-boot-starter-data-rest:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-log4j2 (org.springframework.boot:spring-boot-starter-log4j2:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-security (org.springframework.boot:spring-boot-starter-security:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-test (org.springframework.boot:spring-boot-starter-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-tomcat (org.springframework.boot:spring-boot-starter-tomcat:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-starter-web (org.springframework.boot:spring-boot-starter-web:2.7.12 - 
https://spring.io/projects/spring-boot) + * spring-boot-test (org.springframework.boot:spring-boot-test:2.7.12 - https://spring.io/projects/spring-boot) + * spring-boot-test-autoconfigure (org.springframework.boot:spring-boot-test-autoconfigure:2.7.12 - https://spring.io/projects/spring-boot) + * Spring Data Core (org.springframework.data:spring-data-commons:2.7.12 - https://www.spring.io/spring-data/spring-data-commons) + * Spring Data REST - Core (org.springframework.data:spring-data-rest-core:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-core) + * Spring Data REST - WebMVC (org.springframework.data:spring-data-rest-webmvc:3.7.12 - https://www.spring.io/spring-data/spring-data-rest-parent/spring-data-rest-webmvc) + * Spring HATEOAS (org.springframework.hateoas:spring-hateoas:1.5.4 - https://github.com/spring-projects/spring-hateoas) * Spring Plugin - Core (org.springframework.plugin:spring-plugin-core:2.0.0.RELEASE - https://github.com/spring-projects/spring-plugin/spring-plugin-core) - * spring-security-config (org.springframework.security:spring-security-config:5.2.2.RELEASE - http://spring.io/spring-security) - * spring-security-core (org.springframework.security:spring-security-core:5.2.2.RELEASE - http://spring.io/spring-security) - * spring-security-test (org.springframework.security:spring-security-test:5.2.2.RELEASE - http://spring.io/spring-security) - * spring-security-web (org.springframework.security:spring-security-web:5.2.2.RELEASE - http://spring.io/spring-security) + * spring-security-config (org.springframework.security:spring-security-config:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-core (org.springframework.security:spring-security-core:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-crypto (org.springframework.security:spring-security-crypto:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-test 
(org.springframework.security:spring-security-test:5.7.8 - https://spring.io/projects/spring-security) + * spring-security-web (org.springframework.security:spring-security-web:5.7.8 - https://spring.io/projects/spring-security) * SWORD v2 :: Common Server Library (org.swordapp:sword2-server:1.0 - http://www.swordapp.org/) - * ISO Parser (org.tallison:isoparser:1.9.41.2 - https://github.com/tballison/mp4parser) - * org.tallison:metadata-extractor (org.tallison:metadata-extractor:2.13.0 - https://drewnoakes.com/code/exif/) - * XMPCore Shaded (org.tallison.xmp:xmpcore-shaded:6.1.10 - https://github.com/tballison) + * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) * xml-matchers (org.xmlmatchers:xml-matchers:0.10 - http://code.google.com/p/xml-matchers/) - * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.6.3 - https://www.xmlunit.org/) - * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.6.4 - https://www.xmlunit.org/) - * SnakeYAML (org.yaml:snakeyaml:1.25 - http://www.snakeyaml.org) - * ROME, RSS and atOM utilitiEs for Java (rome:rome:1.0 - https://rome.dev.java.net/) + * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.8.0 - https://www.xmlunit.org/) + * org.xmlunit:xmlunit-core (org.xmlunit:xmlunit-core:2.9.1 - https://www.xmlunit.org/) + * org.xmlunit:xmlunit-placeholders (org.xmlunit:xmlunit-placeholders:2.8.0 - https://www.xmlunit.org/xmlunit-placeholders/) + * SnakeYAML (org.yaml:snakeyaml:1.30 - https://bitbucket.org/snakeyaml/snakeyaml) + * software.amazon.ion:ion-java (software.amazon.ion:ion-java:1.0.2 - https://github.com/amznlabs/ion-java/) + * Xalan Java Serializer (xalan:serializer:2.7.2 - http://xml.apache.org/xalan-j/) * xalan (xalan:xalan:2.7.0 - no url defined) - * Xerces2-j (xerces:xercesImpl:2.12.0 - https://xerces.apache.org/xerces2-j/) + * Xalan Java (xalan:xalan:2.7.2 - http://xml.apache.org/xalan-j/) + * Xerces2-j (xerces:xercesImpl:2.12.2 - https://xerces.apache.org/xerces2-j/) * XML 
Commons External Components XML APIs (xml-apis:xml-apis:1.4.01 - http://xml.apache.org/commons/components/external/) - Apache-2.0: - - * snappy-java (org.xerial.snappy:snappy-java:1.1.7.6 - https://github.com/xerial/snappy-java) - - BSD 2-Clause: - - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - - BSD 3-clause License w/nuclear disclaimer: - - * Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) - - BSD 3-clause New License: - - * dom4j (org.dom4j:dom4j:2.1.1 - http://dom4j.github.io/) - - BSD Licence 3: - - * Hamcrest (org.hamcrest:hamcrest:2.1 - http://hamcrest.org/JavaHamcrest/) - BSD License: * AntLR Parser Generator (antlr:antlr:2.7.7 - http://www.antlr.org/) + * Adobe XMPCore (com.adobe.xmp:xmpcore:6.1.11 - https://www.adobe.com/devnet/xmp/library/eula-xmp-library-java.html) * coverity-escapers (com.coverity.security:coverity-escapers:1.1.1 - http://coverity.com/security) + * Java Advanced Imaging Image I/O Tools API core (standalone) (com.github.jai-imageio:jai-imageio-core:1.4.0 - https://github.com/jai-imageio/jai-imageio-core) * JSONLD Java :: Core (com.github.jsonld-java:jsonld-java:0.5.1 - http://github.com/jsonld-java/jsonld-java/jsonld-java/) - * curvesapi (com.github.virtuald:curvesapi:1.06 - https://github.com/virtuald/curvesapi) + * curvesapi (com.github.virtuald:curvesapi:1.07 - https://github.com/virtuald/curvesapi) + * Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.11.0 - https://developers.google.com/protocol-buffers/protobuf-java/) + * JZlib (com.jcraft:jzlib:1.1.3 - http://www.jcraft.com/jzlib/) * dnsjava (dnsjava:dnsjava:2.1.7 - http://www.dnsjava.org) - * Units of Measurement API 
(javax.measure:unit-api:1.0 - http://unitsofmeasurement.github.io/) * jaxen (jaxen:jaxen:1.1.6 - http://jaxen.codehaus.org/) - * JLine (jline:jline:0.9.94 - http://jline.sourceforge.net) * ANTLR 4 Runtime (org.antlr:antlr4-runtime:4.5.1-1 - http://www.antlr.org/antlr4-runtime) * commons-compiler (org.codehaus.janino:commons-compiler:3.0.9 - http://janino-compiler.github.io/commons-compiler/) * janino (org.codehaus.janino:janino:3.0.9 - http://janino-compiler.github.io/janino/) - * Stax2 API (org.codehaus.woodstox:stax2-api:3.1.4 - http://wiki.fasterxml.com/WoodstoxStax2) + * Stax2 API (org.codehaus.woodstox:stax2-api:4.2.1 - http://github.com/FasterXML/stax2-api) + * Hamcrest Date (org.exparity:hamcrest-date:2.0.7 - https://github.com/exparity/hamcrest-date) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Hamcrest (org.hamcrest:hamcrest:2.2 - http://hamcrest.org/JavaHamcrest/) * Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all) * Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core) - * Hamcrest library (org.hamcrest:hamcrest-library:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-library) - * JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org) + * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * JBibTeX (org.jbibtex:jbibtex:1.0.20 - http://www.jbibtex.org) + * asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/) * asm-analysis (org.ow2.asm:asm-analysis:7.1 - http://asm.ow2.org/) + * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) * asm-tree (org.ow2.asm:asm-tree:7.1 - http://asm.ow2.org/) * asm-util (org.ow2.asm:asm-util:7.1 - http://asm.ow2.org/) + * 
PostgreSQL JDBC Driver (org.postgresql:postgresql:42.6.0 - https://jdbc.postgresql.org) + * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) * JMatIO (org.tallison:jmatio:1.5 - https://github.com/tballison/jmatio) * XMLUnit for Java (xmlunit:xmlunit:1.3 - http://xmlunit.sourceforge.net/) - BSD-2-Clause: - - * PostgreSQL JDBC Driver - JDBC 4.2 (org.postgresql:postgresql:42.2.9 - https://github.com/pgjdbc/pgjdbc) - - BSD-3-Clause: - - * asm (org.ow2.asm:asm:8.0.1 - http://asm.ow2.io/) - * asm-commons (org.ow2.asm:asm-commons:8.0.1 - http://asm.ow2.io/) - - Bouncy Castle Licence: - - * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.65 - http://www.bouncycastle.org/java.html) - * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF APIs (org.bouncycastle:bcpkix-jdk15on:1.65 - http://www.bouncycastle.org/java.html) - * Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.65 - http://www.bouncycastle.org/java.html) + CC0: - CDDL/GPLv2+CE: - - * JavaBeans Activation Framework (com.sun.activation:javax.activation:1.2.0 - http://java.net/all/javax.activation/) - * JavaBeans Activation Framework API jar (javax.activation:javax.activation-api:1.2.0 - http://java.net/all/javax.activation-api/) + * reactive-streams (org.reactivestreams:reactive-streams:1.0.2 - http://www.reactive-streams.org/) Common Development and Distribution License (CDDL): * istack common utility code runtime (com.sun.istack:istack-commons-runtime:3.0.7 - http://java.net/istack-commons/istack-commons-runtime/) + * JavaMail API (com.sun.mail:javax.mail:1.6.2 - http://javaee.github.io/javamail/javax.mail) + * JavaMail API (no providers) (com.sun.mail:mailapi:1.6.2 - http://javaee.github.io/javamail/mailapi) + * Old JAXB Core (com.sun.xml.bind:jaxb-core:2.3.0.1 - http://jaxb.java.net/jaxb-bundles/jaxb-core) + * Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:2.3.1 - http://jaxb.java.net/jaxb-bundles/jaxb-impl) * Jakarta Annotations API 
(jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) * JavaBeans Activation Framework (JAF) (javax.activation:activation:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp) + * JavaBeans Activation Framework API jar (javax.activation:javax.activation-api:1.2.0 - http://java.net/all/javax.activation-api/) * javax.annotation API (javax.annotation:javax.annotation-api:1.3.2 - http://jcp.org/en/jsr/detail?id=250) - * JavaMail API (compat) (javax.mail:mail:1.4.7 - http://kenai.com/projects/javamail/mail) * Java Servlet API (javax.servlet:javax.servlet-api:3.1.0 - http://servlet-spec.java.net) * javax.transaction API (javax.transaction:javax.transaction-api:1.3 - http://jta-spec.java.net) * jaxb-api (javax.xml.bind:jaxb-api:2.3.1 - https://github.com/javaee/jaxb-spec/jaxb-api) - * JAX-WS API (javax.xml.ws:jaxws-api:2.3.1 - https://github.com/javaee/jax-ws-spec) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) + * JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) * HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils) @@ -457,10 +476,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) * JAXB Runtime (org.glassfish.jaxb:jaxb-runtime:2.3.1 - http://jaxb.java.net/jaxb-runtime-parent/jaxb-runtime) * TXW2 Runtime (org.glassfish.jaxb:txw2:2.3.1 - 
http://jaxb.java.net/jaxb-txw-parent/txw2) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) * Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec) - * MIME streaming extension (org.jvnet.mimepull:mimepull:1.9.7 - http://mimepull.java.net) + * Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/) Cordra (Version 2) License Agreement: @@ -468,123 +487,91 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines * net.cnri:cnri-servlet-container-lib (net.cnri:cnri-servlet-container-lib:3.0.0 - https://gitlab.com/cnri/cnri-servlet-container) * net.cnri:cnriutil (net.cnri:cnriutil:2.0 - https://gitlab.com/cnri/cnriutil) - Dual license consisting of the CDDL v1.1 and GPL v2: - - * saaj-impl (com.sun.xml.messaging.saaj:saaj-impl:1.4.0-b03 - http://java.net/saaj-impl/) - * Extended StAX API (org.jvnet.staxex:stax-ex:1.8 - http://stax-ex.java.net/) - - EDL 1.0: + Eclipse Distribution License, Version 1.0: - * JavaBeans Activation Framework (com.sun.activation:jakarta.activation:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation) - * JavaBeans Activation Framework API jar (jakarta.activation:jakarta.activation-api:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) * Jakarta Activation API jar (jakarta.activation:jakarta.activation-api:1.2.2 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) - * 
jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - - EPL 2.0: - - * Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) - * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) - * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) - * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) - * HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils) - * OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator) - * aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged) - * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - - Eclipse Distribution License (EDL), Version 1.0: - - * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org) - - Eclipse 
Distribution License - v 1.0: - - * jakarta.xml.bind-api (jakarta.xml.bind:jakarta.xml.bind-api:2.3.2 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) * Jakarta XML Binding API (jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) - - Eclipse Distribution License v. 1.0: - * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org) Eclipse Public License: - * AspectJ runtime (org.aspectj:aspectjrt:1.8.0 - http://www.aspectj.org) - * AspectJ weaver (org.aspectj:aspectjweaver:1.9.5 - http://www.aspectj.org) + * System Rules (com.github.stefanbirkner:system-rules:1.19.0 - http://stefanbirkner.github.io/system-rules/) + * H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com) + * Jakarta Annotations API (jakarta.annotation:jakarta.annotation-api:1.3.5 - https://projects.eclipse.org/projects/ee4j.ca) + * jakarta.ws.rs-api (jakarta.ws.rs:jakarta.ws.rs-api:2.1.6 - https://github.com/eclipse-ee4j/jaxrs-api) + * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) + * JUnit (junit:junit:4.13.1 - http://junit.org) + * AspectJ Weaver (org.aspectj:aspectjweaver:1.9.7 - https://www.eclipse.org/aspectj/) * Eclipse Compiler for Java(TM) (org.eclipse.jdt:ecj:3.14.0 - http://www.eclipse.org/jdt) * Jetty :: Apache JSP Implementation (org.eclipse.jetty:apache-jsp:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Apache :: JSTL module (org.eclipse.jetty:apache-jstl:9.4.15.v20190215 - 
http://tomcat.apache.org/taglibs/standard/) - * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) - * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) - * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) - * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Client (org.eclipse.jetty:jetty-alpn-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-client) + * Jetty :: ALPN :: JDK9 Client Implementation (org.eclipse.jetty:jetty-alpn-java-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-client) + * Jetty :: ALPN :: JDK9 Server Implementation (org.eclipse.jetty:jetty-alpn-java-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-java-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) + * Jetty :: ALPN :: Server (org.eclipse.jetty:jetty-alpn-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-alpn-parent/jetty-alpn-server) * Jetty :: Servlet Annotations (org.eclipse.jetty:jetty-annotations:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-client) - * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-continuation) - * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-deploy) - * Jetty :: Http 
Utility (org.eclipse.jetty:jetty-http:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-http) - * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-io) - * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-jmx) + * Jetty :: Asynchronous HTTP Client (org.eclipse.jetty:jetty-client:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-client) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Continuation (org.eclipse.jetty:jetty-continuation:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-continuation) + * Jetty :: Deployers (org.eclipse.jetty:jetty-deploy:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-deploy) + * Jetty :: Http Utility (org.eclipse.jetty:jetty-http:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-http) + * Jetty :: IO Utility (org.eclipse.jetty:jetty-io:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-io) + * Jetty :: JMX Management (org.eclipse.jetty:jetty-jmx:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-jmx) * Jetty :: JNDI Naming (org.eclipse.jetty:jetty-jndi:9.4.15.v20190215 - http://www.eclipse.org/jetty) * Jetty :: Plus (org.eclipse.jetty:jetty-plus:9.4.15.v20190215 - http://www.eclipse.org/jetty) - * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-rewrite) - * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-security) - * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-server) - * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlet) - * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-servlets) - * Jetty :: Utilities 
(org.eclipse.jetty:jetty-util:9.4.41.v20210516 - https://eclipse.org/jetty/jetty-util) - * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-webapp) - * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.34.v20201102 - https://eclipse.org/jetty/jetty-xml) - * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-client) - * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-common) - * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-hpack) - * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) - * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.34.v20201102 - https://eclipse.org/jetty/http2-parent/http2-server) + * Jetty :: Rewrite Handler (org.eclipse.jetty:jetty-rewrite:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-rewrite) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.44.v20210927 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Security (org.eclipse.jetty:jetty-security:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-security) + * Jetty :: Server Core (org.eclipse.jetty:jetty-server:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-server) + * Jetty :: Servlet Handling (org.eclipse.jetty:jetty-servlet:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlet) + * Jetty :: Utility Servlets and Filters (org.eclipse.jetty:jetty-servlets:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-servlets) + * Jetty :: Utilities (org.eclipse.jetty:jetty-util:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-util) + * Jetty :: Utilities :: Ajax(JSON) (org.eclipse.jetty:jetty-util-ajax:9.4.51.v20230217 - 
https://eclipse.org/jetty/jetty-util-ajax) + * Jetty :: Webapp Application Support (org.eclipse.jetty:jetty-webapp:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-webapp) + * Jetty :: XML utilities (org.eclipse.jetty:jetty-xml:9.4.51.v20230217 - https://eclipse.org/jetty/jetty-xml) + * Jetty :: HTTP2 :: Client (org.eclipse.jetty.http2:http2-client:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-client) + * Jetty :: HTTP2 :: Common (org.eclipse.jetty.http2:http2-common:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-common) + * Jetty :: HTTP2 :: HPACK (org.eclipse.jetty.http2:http2-hpack:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-hpack) + * Jetty :: HTTP2 :: HTTP Client Transport (org.eclipse.jetty.http2:http2-http-client-transport:9.4.44.v20210927 - https://eclipse.org/jetty/http2-parent/http2-http-client-transport) + * Jetty :: HTTP2 :: Server (org.eclipse.jetty.http2:http2-server:9.4.51.v20230217 - https://eclipse.org/jetty/http2-parent/http2-server) * Jetty :: Schemas (org.eclipse.jetty.toolchain:jetty-schemas:3.1.2 - https://eclipse.org/jetty/jetty-schemas) + * HK2 API module (org.glassfish.hk2:hk2-api:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-api) + * ServiceLocator Default Implementation (org.glassfish.hk2:hk2-locator:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-locator) + * HK2 Implementation Utilities (org.glassfish.hk2:hk2-utils:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/hk2-utils) + * OSGi resource locator (org.glassfish.hk2:osgi-resource-locator:1.0.3 - https://projects.eclipse.org/projects/ee4j/osgi-resource-locator) + * aopalliance version 1.0 repackaged as a module (org.glassfish.hk2.external:aopalliance-repackaged:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/aopalliance-repackaged) + * javax.inject:1 as OSGi bundle (org.glassfish.hk2.external:jakarta.inject:2.6.1 - https://github.com/eclipse-ee4j/glassfish-hk2/external/jakarta.inject) + * 
jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.2.Final - http://hibernate.org) * Jetty Server (org.mortbay.jetty:jetty:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/modules/jetty) * Jetty Servlet Tester (org.mortbay.jetty:jetty-servlet-tester:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-servlet-tester) * Jetty Utilities (org.mortbay.jetty:jetty-util:6.1.26 - http://www.eclipse.org/jetty/jetty-parent/project/jetty-util) - Eclipse Public License (EPL), Version 1.0: - - * Java Persistence API, Version 2.1 (org.hibernate.javax.persistence:hibernate-jpa-2.1-api:1.0.0.Final - http://hibernate.org) - - Eclipse Public License 1.0: - - * JUnit (junit:junit:4.13.1 - http://junit.org) - - Eclipse Public License v1.0: - - * javax.persistence-api (javax.persistence:javax.persistence-api:2.2 - https://github.com/javaee/jpa-spec) - - Eclipse Public License, Version 1.0: - - * c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0) - * mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java) - - GNU General Public License, Version 2 with the Classpath Exception: - - * Java Transaction API (org.jboss.spec.javax.transaction:jboss-transaction-api_1.2_spec:1.1.1.Final - http://www.jboss.org/jboss-transaction-api_1.2_spec) - GNU Lesser General Public License (LGPL): - * SpotBugs Annotations (com.github.spotbugs:spotbugs-annotations:3.1.9 - https://spotbugs.github.io/) + * btf (com.github.java-json-tools:btf:1.3 - 
https://github.com/java-json-tools/btf) + * jackson-coreutils (com.github.java-json-tools:jackson-coreutils:2.0 - https://github.com/java-json-tools/jackson-coreutils) + * jackson-coreutils-equivalence (com.github.java-json-tools:jackson-coreutils-equivalence:1.0 - https://github.com/java-json-tools/jackson-coreutils) + * json-schema-core (com.github.java-json-tools:json-schema-core:1.2.14 - https://github.com/java-json-tools/json-schema-core) + * json-schema-validator (com.github.java-json-tools:json-schema-validator:2.2.14 - https://github.com/java-json-tools/json-schema-validator) + * msg-simple (com.github.java-json-tools:msg-simple:1.2 - https://github.com/java-json-tools/msg-simple) + * uri-template (com.github.java-json-tools:uri-template:0.10 - https://github.com/java-json-tools/uri-template) * FindBugs-Annotations (com.google.code.findbugs:annotations:3.0.1u2 - http://findbugs.sourceforge.net/) - * c3p0 (com.mchange:c3p0:0.9.5.5 - https://github.com/swaldman/c3p0) - * mchange-commons-java (com.mchange:mchange-commons-java:0.2.19 - https://github.com/swaldman/mchange-commons-java) - * JHighlight (org.codelibs:jhighlight:1.0.3 - https://github.com/codelibs/jhighlight) + * JHighlight (org.codelibs:jhighlight:1.1.0 - https://github.com/codelibs/jhighlight) + * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jcache (org.hibernate:hibernate-jcache:5.6.15.Final - https://hibernate.org/orm) + * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.6.15.Final - https://hibernate.org/orm) + * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.2.Final - http://hibernate.org) * im4java (org.im4java:im4java:1.4.0 - http://sourceforge.net/projects/im4java/) - * JacORB OMG-API (org.jacorb:jacorb-omgapi:3.9 - http://www.jacorb.org) * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) * XOM (xom:xom:1.2.5 - 
http://xom.nu) - - GNU Library General Public License v2.1 or later: - - * Hibernate ORM - hibernate-core (org.hibernate:hibernate-core:5.4.10.Final - http://hibernate.org/orm) - * Hibernate ORM - hibernate-ehcache (org.hibernate:hibernate-ehcache:5.4.10.Final - http://hibernate.org/orm) - * Hibernate ORM - hibernate-jpamodelgen (org.hibernate:hibernate-jpamodelgen:5.4.10.Final - http://hibernate.org/orm) - * Hibernate Commons Annotations (org.hibernate.common:hibernate-commons-annotations:5.1.0.Final - http://hibernate.org) + * XOM (xom:xom:1.3.7 - https://xom.nu) Go License: @@ -594,89 +581,61 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines * Handle Server (net.handle:handle:9.3.0 - https://www.handle.net) - JDOM License (Apache-style license): - - * jdom (jdom:jdom:1.0 - no url defined) - - LGPL, version 2.1: - - * Java Native Access (net.java.dev.jna:jna:5.5.0 - https://github.com/java-native-access/jna) - - MIT: - - * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) - * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.5.1 - https://www.webjars.org) - * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.5.2 - https://www.webjars.org) - MIT License: + * better-files (com.github.pathikrit:better-files_2.13:3.9.1 - https://github.com/pathikrit/better-files) * Java SemVer (com.github.zafarkhaja:java-semver:0.9.0 - https://github.com/zafarkhaja/jsemver) + * dd-plist (com.googlecode.plist:dd-plist:1.25 - http://www.github.com/3breadt/dd-plist) + * DigitalCollections: IIIF API Library (de.digitalcollections.iiif:iiif-apis:0.3.9 - https://github.com/dbmdz/iiif-apis) + * s3mock (io.findify:s3mock_2.13:0.2.6 - https://github.com/findify/s3mock) + * JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) + * Bouncy Castle S/MIME API (org.bouncycastle:bcmail-jdk15on:1.70 - https://www.bouncycastle.org/java.html) + * Bouncy Castle PKIX, CMS, EAC, TSP, PKCS, OCSP, CMP, and CRMF 
APIs (org.bouncycastle:bcpkix-jdk15on:1.70 - https://www.bouncycastle.org/java.html) + * Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.70 - https://www.bouncycastle.org/java.html) + * Bouncy Castle ASN.1 Extension and Utility APIs (org.bouncycastle:bcutil-jdk15on:1.70 - https://www.bouncycastle.org/java.html) * org.brotli:dec (org.brotli:dec:0.1.2 - http://brotli.org/dec) - * Checker Qual (org.checkerframework:checker-qual:3.5.0 - https://checkerframework.org) - * jsoup Java HTML Parser (org.jsoup:jsoup:1.13.1 - https://jsoup.org/) - * mockito-core (org.mockito:mockito-core:3.8.0 - https://github.com/mockito/mockito) - * mockito-inline (org.mockito:mockito-inline:3.8.0 - https://github.com/mockito/mockito) + * Checker Qual (org.checkerframework:checker-qual:3.10.0 - https://checkerframework.org) + * Checker Qual (org.checkerframework:checker-qual:3.31.0 - https://checkerframework.org) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * mockito-core (org.mockito:mockito-core:3.12.4 - https://github.com/mockito/mockito) + * mockito-inline (org.mockito:mockito-inline:3.12.4 - https://github.com/mockito/mockito) * ORCID - Model (org.orcid:orcid-model:3.0.2 - http://github.com/ORCID/orcid-model) - * JCL 1.2 implemented over SLF4J (org.slf4j:jcl-over-slf4j:1.7.25 - http://www.slf4j.org) - * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.25 - http://www.slf4j.org) - * SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org) - - MIT License (MIT): - - * Itadaki jbzip2 (org.itadaki:bzip2:0.9.1 - https://code.google.com/p/jbzip2/) - - MIT license: - - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 
(org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - - Modified BSD: - - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * JUL to SLF4J bridge (org.slf4j:jul-to-slf4j:1.7.36 - http://www.slf4j.org) + * SLF4J API Module (org.slf4j:slf4j-api:1.7.36 - http://www.slf4j.org) + * SLF4J Extensions Module (org.slf4j:slf4j-ext:1.7.28 - http://www.slf4j.org) + * HAL Browser (org.webjars:hal-browser:ad9b865 - http://webjars.org) + * toastr (org.webjars.bowergithub.codeseven:toastr:2.1.4 - http://webjars.org) + * backbone (org.webjars.bowergithub.jashkenas:backbone:1.4.1 - https://www.webjars.org) + * underscore (org.webjars.bowergithub.jashkenas:underscore:1.13.2 - https://www.webjars.org) + * jquery (org.webjars.bowergithub.jquery:jquery-dist:3.6.0 - https://www.webjars.org) + * urijs (org.webjars.bowergithub.medialize:uri.js:1.19.10 - https://www.webjars.org) + * bootstrap (org.webjars.bowergithub.twbs:bootstrap:4.6.1 - https://www.webjars.org) + * core-js (org.webjars.npm:core-js:3.30.1 - https://www.webjars.org) + * @json-editor/json-editor (org.webjars.npm:json-editor__json-editor:2.6.1 - https://www.webjars.org) Mozilla Public License: * juniversalchardet (com.googlecode.juniversalchardet:juniversalchardet:1.0.3 - http://juniversalchardet.googlecode.com/) - * h2 (com.h2database:h2:1.4.187 - no url defined) - * Javassist (org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) - - Mozilla Public License Version 2.0: - + * H2 Database Engine (com.h2database:h2:2.1.210 - https://h2database.com) * Saxon-HE (net.sf.saxon:Saxon-HE:9.8.0-14 - http://www.saxonica.com/) - - OGC copyright: - - * GeoAPI (org.opengis:geoapi:3.0.1 - http://www.geoapi.org/geoapi/) + * Javassist 
(org.javassist:javassist:3.25.0-GA - http://www.javassist.org/) + * Mozilla Rhino (org.mozilla:rhino:1.7.7.2 - https://developer.mozilla.org/en/Rhino) Public Domain: - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - * XZ for Java (org.tukaani:xz:1.8 - https://tukaani.org/xz/java.html) - - Similar to Apache License but with the acknowledgment clause removed: - - * JDOM (org.jdom:jdom:1.1.3 - http://www.jdom.org) - * JDOM (org.jdom:jdom2:2.0.6 - http://www.jdom.org) - - The Apache License, Version 2.0: - - * Woodstox (com.fasterxml.woodstox:woodstox-core:5.0.3 - https://github.com/FasterXML/woodstox) - * SentimentAnalysisParser (edu.usc.ir:sentiment-analysis-parser:0.1 - https://github.com/USCDataScience/SentimentAnalysisParser) - - The GNU General Public License (GPL), Version 2, With Classpath Exception: - - * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) - - The JSON License: - - * JSON in Java (org.json:json:20180130 - https://github.com/douglascrockford/JSON-java) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-core-common (org.glassfish.jersey.core:jersey-common:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-common) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) + * JSON in Java (org.json:json:20230227 - 
https://github.com/douglascrockford/JSON-java) + * LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/) + * Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) + * XZ for Java (org.tukaani:xz:1.9 - https://tukaani.org/xz/java.html) UnRar License: - * Java UnRar (com.github.junrar:junrar:4.0.0 - https://github.com/junrar/junrar) + * Java Unrar (com.github.junrar:junrar:7.5.3 - https://github.com/junrar/junrar) Unicode/ICU License: @@ -684,14 +643,10 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines W3C license: - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) jQuery license: - * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) - * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.30.1 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) - - lgpl: - - * Java RMI API (org.jboss.spec.javax.rmi:jboss-rmi-api_1.0_spec:1.0.6.Final - http://www.jboss.org/jboss-rmi-api_1.0_spec) + * jersey-core-client (org.glassfish.jersey.core:jersey-client:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/jersey-client) + * jersey-inject-hk2 (org.glassfish.jersey.inject:jersey-hk2:2.35 - https://projects.eclipse.org/projects/ee4j.jersey/project/jersey-hk2) diff --git a/NOTICE b/NOTICE index 6743fea511e7..010c89a4bbf6 100644 --- a/NOTICE +++ b/NOTICE @@ -1,3 +1,13 @@ 
+Licenses of Third-Party Libraries +================================= + +DSpace uses third-party libraries which may be distributed under +different licenses than specified in our LICENSE file. Information +about these licenses is detailed in the LICENSES_THIRD_PARTY file at +the root of the source tree. You must agree to the terms of these +licenses, in addition to the DSpace source code license, in order to +use this software. + Licensing Notices ================= diff --git a/README.md b/README.md index caca04d0a9e2..af9158eff361 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Documentation for each release may be viewed online or downloaded via our [Docum The latest DSpace Installation instructions are available at: https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace -Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle) +Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL) and a servlet container (usually Tomcat) in order to function. More information about these and all other prerequisites can be found in the Installation instructions above. @@ -48,18 +48,7 @@ See [Running DSpace 7 with Docker Compose](dspace/src/main/docker-compose/README ## Contributing -DSpace is a community built and supported project. We do not have a centralized development or support team, -but have a dedicated group of volunteers who help us improve the software, documentation, resources, etc. - -We welcome contributions of any type. Here's a few basic guides that provide suggestions for contributing to DSpace: -* [How to Contribute to DSpace](https://wiki.lyrasis.org/display/DSPACE/How+to+Contribute+to+DSpace): How to contribute in general (via code, documentation, bug reports, expertise, etc) -* [Code Contribution Guidelines](https://wiki.lyrasis.org/display/DSPACE/Code+Contribution+Guidelines): How to give back code or contribute features, bug fixes, etc. 
-* [DSpace Community Advisory Team (DCAT)](https://wiki.lyrasis.org/display/cmtygp/DSpace+Community+Advisory+Team): If you are not a developer, we also have an interest group specifically for repository managers. The DCAT group meets virtually, once a month, and sends open invitations to join their meetings via the [DCAT mailing list](https://groups.google.com/d/forum/DSpaceCommunityAdvisoryTeam). - -We also encourage GitHub Pull Requests (PRs) at any time. Please see our [Development with Git](https://wiki.lyrasis.org/display/DSPACE/Development+with+Git) guide for more info. - -In addition, a listing of all known contributors to DSpace software can be -found online at: https://wiki.lyrasis.org/display/DSPACE/DSpaceContributors +See [Contributing documentation](CONTRIBUTING.md) ## Getting Help @@ -136,3 +125,6 @@ run automatically by [GitHub Actions](https://github.com/DSpace/DSpace/actions?q DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause). The full license is available in the [LICENSE](LICENSE) file or online at http://www.dspace.org/license/ + +DSpace uses third-party libraries which may be distributed under different licenses. Those licenses are listed +in the [LICENSES_THIRD_PARTY](LICENSES_THIRD_PARTY) file. diff --git a/checkstyle.xml b/checkstyle.xml index 815edaec7bf0..e0fa808d83cb 100644 --- a/checkstyle.xml +++ b/checkstyle.xml @@ -92,9 +92,7 @@ For more information on CheckStyle configurations below, see: http://checkstyle. - - - + diff --git a/docker-compose-cli.yml b/docker-compose-cli.yml index 1a53f361894e..b2c6df636b64 100644 --- a/docker-compose-cli.yml +++ b/docker-compose-cli.yml @@ -1,25 +1,38 @@ version: "3.7" - +networks: + # Default to using network named 'dspacenet' from docker-compose.yml. + # Its full name will be prepended with the project name (e.g. 
"-p d7" means it will be named "d7_dspacenet") + default: + name: ${COMPOSE_PROJECT_NAME}_dspacenet + external: true services: dspace-cli: - image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-dspace-7_x}" + image: "${DOCKER_OWNER:-dspace}/dspace-cli:${DSPACE_VER:-latest}" container_name: dspace-cli build: context: . dockerfile: Dockerfile.cli - #environment: + environment: + # Below syntax may look odd, but it is how to override dspace.cfg settings via env variables. + # See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml + # __P__ => "." (e.g. dspace__P__dir => dspace.dir) + # __D__ => "-" (e.g. google__D__metadata => google-metadata) + # dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory. + dspace__P__dir: /dspace + # db.url: Ensure we are using the 'dspacedb' image for our database + db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace' + # solr.server: Ensure we are using the 'dspacesolr' image for Solr + solr__P__server: http://dspacesolr:8983/solr volumes: - - ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg + # Keep DSpace assetstore directory between reboots - assetstore:/dspace/assetstore + # Mount local [src]/dspace/config/ to container. This syncs your local configs with container + # NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg + - ./dspace/config:/dspace/config entrypoint: /dspace/bin/dspace command: help - networks: - - dspacenet tty: true stdin_open: true volumes: assetstore: - -networks: - dspacenet: diff --git a/docker-compose.yml b/docker-compose.yml index 5d80d95090cf..980e88d04fd4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,30 +4,54 @@ networks: ipam: config: # Define a custom subnet for our DSpace network, so that we can easily trust requests from host to container. - # If you customize this value, be sure to customize the 'proxies.trusted.ipranges' in your local.cfg. 
+ # If you customize this value, be sure to customize the 'proxies.trusted.ipranges' env variable below. - subnet: 172.23.0.0/16 services: # DSpace (backend) webapp container dspace: container_name: dspace - image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}" + environment: + # Below syntax may look odd, but it is how to override dspace.cfg settings via env variables. + # See https://github.com/DSpace/DSpace/blob/main/dspace/config/config-definition.xml + # __P__ => "." (e.g. dspace__P__dir => dspace.dir) + # __D__ => "-" (e.g. google__D__metadata => google-metadata) + # dspace.dir: Must match with Dockerfile's DSPACE_INSTALL directory. + dspace__P__dir: /dspace + # Uncomment to set a non-default value for dspace.server.url or dspace.ui.url + # dspace__P__server__P__url: http://localhost:8080/server + # dspace__P__ui__P__url: http://localhost:4000 + dspace__P__name: 'DSpace Started with Docker Compose' + # db.url: Ensure we are using the 'dspacedb' image for our database + db__P__url: 'jdbc:postgresql://dspacedb:5432/dspace' + # solr.server: Ensure we are using the 'dspacesolr' image for Solr + solr__P__server: http://dspacesolr:8983/solr + # proxies.trusted.ipranges: This setting is required for a REST API running in Docker to trust requests + # from the host machine. This IP range MUST correspond to the 'dspacenet' subnet defined above. + proxies__P__trusted__P__ipranges: '172.23.0' + LOGGING_CONFIG: /dspace/config/log4j2-container.xml + image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-latest-test}" build: context: . 
dockerfile: Dockerfile.test depends_on: - dspacedb networks: - dspacenet: + - dspacenet ports: - published: 8080 target: 8080 - published: 8009 target: 8009 + - published: 8000 + target: 8000 stdin_open: true tty: true volumes: + # Keep DSpace assetstore directory between reboots - assetstore:/dspace/assetstore - - ./dspace/src/main/docker-compose/local.cfg:/dspace/config/local.cfg + # Mount local [src]/dspace/config/ to container. This syncs your local configs with container + # NOTE: Environment variables specified above will OVERRIDE any configs in local.cfg or dspace.cfg + - ./dspace/config:/dspace/config # Ensure that the database is ready BEFORE starting tomcat # 1. While a TCP connection to dspacedb port 5432 is not available, continue to sleep # 2. Then, run database migration to init database tables @@ -39,13 +63,17 @@ services: while (! /dev/null 2>&1; do sleep 1; done; /dspace/bin/dspace database migrate catalina.sh run - # DSpace database container + # DSpace PostgreSQL database container dspacedb: container_name: dspacedb + # Uses a custom Postgres image with pgcrypto installed + image: "${DOCKER_OWNER:-dspace}/dspace-postgres-pgcrypto:${DSPACE_VER:-latest}" + build: + # Must build out of subdirectory to have access to install script for pgcrypto + context: ./dspace/src/main/docker/dspace-postgres-pgcrypto/ environment: PGDATA: /pgdata - # Uses a custom Postgres image with pgcrypto installed - image: dspace/dspace-postgres-pgcrypto + POSTGRES_PASSWORD: dspace networks: dspacenet: ports: @@ -54,12 +82,19 @@ services: stdin_open: true tty: true volumes: + # Keep Postgres data directory between reboots - pgdata:/pgdata # DSpace Solr container dspacesolr: container_name: dspacesolr - # Uses official Solr image at https://hub.docker.com/_/solr/ - image: solr:8.8 + image: "${DOCKER_OWNER:-dspace}/dspace-solr:${DSPACE_VER:-latest}" + build: + context: ./dspace/src/main/docker/dspace-solr/ + # Provide path to Solr configs necessary to build Docker image + 
additional_contexts: + solrconfigs: ./dspace/solr/ + args: + SOLR_VERSION: "${SOLR_VER:-8.11}" networks: dspacenet: ports: @@ -69,23 +104,27 @@ services: tty: true working_dir: /var/solr/data volumes: - # Mount our local Solr core configs so that they are available as Solr configsets on container - - ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority - - ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai - - ./dspace/solr/search:/opt/solr/server/solr/configsets/search - - ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics # Keep Solr data directory between reboots - solr_data:/var/solr/data - # Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr + # Initialize all DSpace Solr cores then start Solr: + # * First, run precreate-core to create the core (if it doesn't yet exist). If exists already, this is a no-op + # * Second, copy configsets to this core: + # Updates to Solr configs require the container to be rebuilt/restarted: `docker compose -p d7 up -d --build dspacesolr` entrypoint: - /bin/bash - '-c' - | init-var-solr precreate-core authority /opt/solr/server/solr/configsets/authority + cp -r /opt/solr/server/solr/configsets/authority/* authority precreate-core oai /opt/solr/server/solr/configsets/oai + cp -r /opt/solr/server/solr/configsets/oai/* oai precreate-core search /opt/solr/server/solr/configsets/search + cp -r /opt/solr/server/solr/configsets/search/* search precreate-core statistics /opt/solr/server/solr/configsets/statistics + cp -r /opt/solr/server/solr/configsets/statistics/* statistics + precreate-core qaevent /opt/solr/server/solr/configsets/qaevent + cp -r /opt/solr/server/solr/configsets/qaevent/* qaevent exec solr -f volumes: assetstore: diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index 03685d53bd9e..f6b06f69edb7 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -12,7 +12,7 @@ org.dspace dspace-parent - 7.0 + 8.0-SNAPSHOT .. 
@@ -102,7 +102,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.0.0 + 3.4.0 validate @@ -116,7 +116,10 @@ org.codehaus.mojo buildnumber-maven-plugin - 1.4 + 3.2.0 + + UNKNOWN_REVISION + validate @@ -334,18 +337,47 @@ - + + org.apache.logging.log4j + log4j-api + org.hibernate - hibernate-ehcache + hibernate-core + + + + org.javassist + javassist + + + + + org.hibernate + hibernate-jcache + + + org.ehcache + ehcache + ${ehcache.version} + + + + org.springframework.boot + spring-boot-starter-cache + ${spring-boot.version} - - org.javassist - javassist + org.springframework.boot + spring-boot-starter-logging + + javax.cache + cache-api + org.hibernate hibernate-jpamodelgen @@ -358,7 +390,7 @@ org.hibernate.javax.persistence hibernate-jpa-2.1-api - 1.0.0.Final + 1.0.2.Final @@ -379,7 +411,7 @@ org.ow2.asm asm-commons - + org.bouncycastle bcpkix-jdk15on @@ -388,39 +420,6 @@ org.bouncycastle bcprov-jdk15on - - - org.eclipse.jetty - jetty-alpn-java-server - - - org.eclipse.jetty - jetty-deploy - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-servlets - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-xml - - - org.eclipse.jetty.http2 - http2-common - - - org.eclipse.jetty.http2 - http2-server - @@ -481,8 +480,8 @@ commons-validator - javax.mail - mail + com.sun.mail + javax.mail javax.servlet @@ -496,16 +495,10 @@ jaxen jaxen - - - xom - xom - - org.jdom - jdom + jdom2 org.apache.pdfbox @@ -515,22 +508,11 @@ org.apache.pdfbox fontbox - - org.apache.poi - poi-scratchpad - - - xalan - xalan - - - xerces - xercesImpl - com.ibm.icu icu4j + org.dspace oclc-harvester2 @@ -546,7 +528,7 @@ org.hamcrest - hamcrest-all + hamcrest test @@ -558,13 +540,6 @@ org.mockito mockito-inline test - - - - net.bytebuddy - byte-buddy - - org.springframework @@ -573,101 +548,49 @@ - org.rometools + com.rometools + rome + + + com.rometools rome-modules - 1.0 org.jbibtex jbibtex - 1.0.10 + 1.0.20 org.apache.httpcomponents httpclient + + 
org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpmime + + org.apache.solr solr-solrj ${solr.client.version} - - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-util - - - + org.apache.solr solr-core test ${solr.client.version} - - - - org.apache.commons - commons-text - - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-util - - - - - org.apache.solr - solr-cell - - - - org.apache.commons - commons-text - - - - org.eclipse.jetty - jetty-http - - - org.eclipse.jetty - jetty-io - - - org.eclipse.jetty - jetty-util - - org.apache.lucene lucene-core - - - org.apache.tika - tika-parsers - org.apache.lucene lucene-analyzers-icu @@ -683,15 +606,21 @@ lucene-analyzers-stempel test + + + + org.apache.tika + tika-core + - org.apache.xmlbeans - xmlbeans + org.apache.tika + tika-parsers-standard-package com.maxmind.geoip2 geoip2 - 2.11.0 + 2.17.0 org.apache.ant @@ -700,7 +629,7 @@ dnsjava dnsjava - 2.1.7 + 2.1.9 @@ -709,13 +638,6 @@ 1.1.1 - - - com.google.code.gson - gson - compile - - com.google.guava guava @@ -743,7 +665,7 @@ org.flywaydb flyway-core - 6.5.5 + 8.5.13 @@ -778,10 +700,6 @@ annotations - - joda-time - joda-time - javax.inject javax.inject @@ -799,44 +717,6 @@ jaxb-runtime - - - org.apache.ws.commons.axiom - axiom-impl - ${axiom.version} - - - - org.apache.geronimo.specs - * - - - - org.codehaus.woodstox - woodstox-core-asl - - - - - org.apache.ws.commons.axiom - axiom-api - ${axiom.version} - - - - org.apache.geronimo.specs - * - - - - org.codehaus.woodstox - woodstox-core-asl - - - - org.glassfish.jersey.core @@ -855,7 +735,7 @@ com.amazonaws aws-java-sdk-s3 - 1.10.50 + 1.12.261 @@ -889,37 +769,169 @@ org.json json - 20180130 + 20231013 + + + + + com.github.stefanbirkner + system-rules + 1.19.0 + test com.opencsv opencsv - 5.2 + 5.9 org.apache.velocity velocity-engine-core - 2.0 - jar org.xmlunit xmlunit-core - 2.6.3 test - + 
org.apache.bcel bcel - 6.4.0 + 6.7.0 + test + + + + + eu.openaire + funders-model + 2.0.0 + + + + org.javassist + javassist + + + + + + eu.openaire + broker-client + 1.1.2 + + org.mock-server + mockserver-junit-rule + 5.11.2 + test + + + + org.yaml + snakeyaml + + + + + + io.findify + s3mock_2.13 + 0.2.6 + test + + + com.amazonawsl + aws-java-sdk-s3 + + + com.amazonaws + aws-java-sdk-s3 + + + + + + + + + + io.netty + netty-buffer + 4.1.106.Final + + + io.netty + netty-transport + 4.1.106.Final + + + io.netty + netty-transport-native-unix-common + 4.1.106.Final + + + io.netty + netty-common + 4.1.106.Final + + + io.netty + netty-handler + 4.1.106.Final + + + io.netty + netty-codec + 4.1.106.Final + + + org.apache.velocity + velocity-engine-core + 2.3 + + + org.xmlunit + xmlunit-core + 2.9.1 + test + + + com.github.java-json-tools + json-schema-validator + 2.2.14 + + + jakarta.xml.bind + jakarta.xml.bind-api + 2.3.3 + + + javax.validation + validation-api + 2.0.1.Final + + + io.swagger + swagger-core + 1.6.2 + + + org.scala-lang + scala-library + 2.13.11 + test + + + + diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java new file mode 100644 index 000000000000..2d782dc3b82a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; + +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Plugin interface for the access status calculation. + */ +public interface AccessStatusHelper { + /** + * Calculate the access status for the item. 
+ * + * @param context the DSpace context + * @param item the item + * @param threshold the embargo threshold date + * @return an access status value + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public String getAccessStatusFromItem(Context context, Item item, Date threshold) + throws SQLException; + + /** + * Retrieve embargo information for the item + * + * @param context the DSpace context + * @param item the item to check for embargo information + * @param threshold the embargo threshold date + * @return an embargo date + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public String getEmbargoFromItem(Context context, Item item, Date threshold) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java new file mode 100644 index 000000000000..01b370747932 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.time.LocalDate; +import java.time.ZoneId; +import java.util.Date; + +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.service.PluginService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation for the access status calculation service. 
+ */ +public class AccessStatusServiceImpl implements AccessStatusService { + // Plugin implementation, set from the DSpace configuration by init(). + protected AccessStatusHelper helper = null; + + protected Date forever_date = null; + + @Autowired(required = true) + protected ConfigurationService configurationService; + + @Autowired(required = true) + protected PluginService pluginService; + + /** + * Initialize the bean (after dependency injection has already taken place). + * Ensures the configurationService is injected, so that we can get the plugin + * and the forever embargo date threshold from the configuration. + * Called by "init-method" in Spring configuration. + * + * @throws Exception on generic exception + */ + public void init() throws Exception { + if (helper == null) { + helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class); + if (helper == null) { + throw new IllegalStateException("The AccessStatusHelper plugin was not defined in " + + "DSpace configuration."); + } + + // Defines the embargo forever date threshold for the access status. + // Look at EmbargoService.FOREVER for some improvements? 
+ int year = configurationService.getIntProperty("access.status.embargo.forever.year"); + int month = configurationService.getIntProperty("access.status.embargo.forever.month"); + int day = configurationService.getIntProperty("access.status.embargo.forever.day"); + + forever_date = Date.from(LocalDate.of(year, month, day) + .atStartOfDay() + .atZone(ZoneId.systemDefault()) + .toInstant()); + } + } + + @Override + public String getAccessStatus(Context context, Item item) throws SQLException { + return helper.getAccessStatusFromItem(context, item, forever_date); + } + + @Override + public String getEmbargoFromItem(Context context, Item item) throws SQLException { + return helper.getEmbargoFromItem(context, item, forever_date); + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java new file mode 100644 index 000000000000..5f0e6d8b259b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -0,0 +1,248 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import 
org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.Group; +import org.joda.time.LocalDate; + +/** + * Default plugin implementation of the access status helper. + * The getAccessStatusFromItem method provides a simple logic to + * calculate the access status of an item based on the policies of + * the primary or the first bitstream in the original bundle. + * Users can override this method for enhanced functionality. + * + * The getEmbargoInformationFromItem method provides a simple logic to + * * retrieve embargo information of bitstreams from an item based on the policies of + * * the primary or the first bitstream in the original bundle. + * * Users can override this method for enhanced functionality. + */ +public class DefaultAccessStatusHelper implements AccessStatusHelper { + public static final String EMBARGO = "embargo"; + public static final String METADATA_ONLY = "metadata.only"; + public static final String OPEN_ACCESS = "open.access"; + public static final String RESTRICTED = "restricted"; + public static final String UNKNOWN = "unknown"; + + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected ResourcePolicyService resourcePolicyService = + AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + protected AuthorizeService authorizeService = + AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + public DefaultAccessStatusHelper() { + super(); + } + + /** + * Look at the item's policies to determine an access status value. + * It is also considering a date threshold for embargoes and restrictions. + * + * If the item is null, simply returns the "unknown" value. 
+ * + * @param context the DSpace context + * @param item the item to check for embargoes + * @param threshold the embargo threshold date + * @return an access status value + */ + @Override + public String getAccessStatusFromItem(Context context, Item item, Date threshold) + throws SQLException { + if (item == null) { + return UNKNOWN; + } + // Consider only the original bundles. + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + // Check for primary bitstreams first. + Bitstream bitstream = bundles.stream() + .map(bundle -> bundle.getPrimaryBitstream()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + if (bitstream == null) { + // If there is no primary bitstream, + // take the first bitstream in the bundles. + bitstream = bundles.stream() + .map(bundle -> bundle.getBitstreams()) + .flatMap(List::stream) + .findFirst() + .orElse(null); + } + return calculateAccessStatusForDso(context, bitstream, threshold); + } + + /** + * Look at the DSpace object's policies to determine an access status value. + * + * If the object is null, returns the "metadata.only" value. + * If any policy attached to the object is valid for the anonymous group, + * returns the "open.access" value. + * Otherwise, if the policy start date is before the embargo threshold date, + * returns the "embargo" value. + * Every other cases return the "restricted" value. + * + * @param context the DSpace context + * @param dso the DSpace object + * @param threshold the embargo threshold date + * @return an access status value + */ + private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) + throws SQLException { + if (dso == null) { + return METADATA_ONLY; + } + // Only consider read policies. + List policies = authorizeService + .getPoliciesActionFilter(context, dso, Constants.READ); + int openAccessCount = 0; + int embargoCount = 0; + int restrictedCount = 0; + int unknownCount = 0; + // Looks at all read policies. 
+ for (ResourcePolicy policy : policies) { + boolean isValid = resourcePolicyService.isDateValid(policy); + Group group = policy.getGroup(); + // The group must not be null here. However, + // if it is, consider this as an unexpected case. + if (group == null) { + unknownCount++; + } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + // Only calculate the status for the anonymous group. + if (isValid) { + // If the policy is valid, the anonymous group have access + // to the bitstream. + openAccessCount++; + } else { + Date startDate = policy.getStartDate(); + if (startDate != null && !startDate.before(threshold)) { + // If the policy start date have a value and if this value + // is equal or superior to the configured forever date, the + // access status is also restricted. + restrictedCount++; + } else { + // If the current date is not between the policy start date + // and end date, the access status is embargo. + embargoCount++; + } + } + } + } + if (openAccessCount > 0) { + return OPEN_ACCESS; + } + if (embargoCount > 0 && restrictedCount == 0) { + return EMBARGO; + } + if (unknownCount > 0) { + return UNKNOWN; + } + return RESTRICTED; + } + + /** + * Look at the policies of the primary (or first) bitstream of the item to retrieve its embargo. + * + * If the item is null, simply returns an empty map with no embargo information. + * + * @param context the DSpace context + * @param item the item to embargo + * @return an access status value + */ + @Override + public String getEmbargoFromItem(Context context, Item item, Date threshold) + throws SQLException { + Date embargoDate; + + // If Item status is not "embargo" then return a null embargo date. + String accessStatus = getAccessStatusFromItem(context, item, threshold); + + if (item == null || !accessStatus.equals(EMBARGO)) { + return null; + } + // Consider only the original bundles. + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + // Check for primary bitstreams first. 
+ Bitstream bitstream = bundles.stream() + .map(bundle -> bundle.getPrimaryBitstream()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + if (bitstream == null) { + // If there is no primary bitstream, + // take the first bitstream in the bundles. + bitstream = bundles.stream() + .map(bundle -> bundle.getBitstreams()) + .flatMap(List::stream) + .findFirst() + .orElse(null); + } + + if (bitstream == null) { + return null; + } + + embargoDate = this.retrieveShortestEmbargo(context, bitstream); + + return embargoDate != null ? embargoDate.toString() : null; + } + + /** + * + */ + private Date retrieveShortestEmbargo(Context context, Bitstream bitstream) throws SQLException { + Date embargoDate = null; + // Only consider read policies. + List policies = authorizeService + .getPoliciesActionFilter(context, bitstream, Constants.READ); + + // Looks at all read policies. + for (ResourcePolicy policy : policies) { + boolean isValid = resourcePolicyService.isDateValid(policy); + Group group = policy.getGroup(); + + if (group != null && StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + // Only calculate the status for the anonymous group. + if (!isValid) { + // If the policy is not valid there is an active embargo + Date startDate = policy.getStartDate(); + + if (startDate != null && !startDate.before(LocalDate.now().toDate())) { + // There is an active embargo: aim to take the shortest embargo (account for rare cases where + // more than one resource policy exists) + if (embargoDate == null) { + embargoDate = startDate; + } else { + embargoDate = startDate.before(embargoDate) ? 
startDate : embargoDate; + } + } + } + } + } + + return embargoDate; + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java new file mode 100644 index 000000000000..77d8f6b44876 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.factory; + +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the access status package, + * use AccessStatusServiceFactory.getInstance() to retrieve an implementation. + */ +public abstract class AccessStatusServiceFactory { + + public abstract AccessStatusService getAccessStatusService(); + + public static AccessStatusServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java new file mode 100644 index 000000000000..fe3848cb2b21 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.factory; + 
+import org.dspace.access.status.service.AccessStatusService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for the access status package, + * use AccessStatusServiceFactory.getInstance() to retrieve an implementation. + */ +public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory { + + @Autowired(required = true) + private AccessStatusService accessStatusService; + + @Override + public AccessStatusService getAccessStatusService() { + return accessStatusService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/package-info.java b/dspace-api/src/main/java/org/dspace/access/status/package-info.java new file mode 100644 index 000000000000..2c0ed22cd4a9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/package-info.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + *

+ * Access status allows the users to view the bitstreams availability before + * browsing into the item itself. + *

+ *

+ * The access status is calculated through a pluggable class: + * {@link org.dspace.access.status.AccessStatusHelper}. + * The {@link org.dspace.access.status.AccessStatusServiceImpl} + * must be configured to specify this class, as well as a forever embargo date + * threshold year, month and day. + *

+ *

+ * See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation + * based on the primary or the first bitstream of the original bundle. You can + * supply your own class to implement more complex access statuses. + *

+ *

+ * For now, the access status is calculated when the item is shown in a list. + *

+ */ + +package org.dspace.access.status; diff --git a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java new file mode 100644 index 000000000000..2ed47bde4cd2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.service; + +import java.sql.SQLException; + +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Public interface to the access status subsystem. + *

+ * Configuration properties: (with examples) + * {@code + * # values for the forever embargo date threshold + * # This threshold date is used in the default access status helper to determine if an item is + * # restricted or embargoed based on the start date of the primary (or first) file policies. + * # In this case, if the policy start date is before the threshold date, the status will + * # be embargo, else it will be restricted. + * # You might want to change this threshold based on your needs. For example: some databases + * # don't accept a date later than 31 December 9999. + * access.status.embargo.forever.year = 10000 + * access.status.embargo.forever.month = 1 + * access.status.embargo.forever.day = 1 + * # implementation of access status helper plugin - replace with local implementation if applicable + * # This default access status helper provides an item status based on the policies of the primary + * # bitstream (or first bitstream in the original bundles if no primary file is specified). + * plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper + * } + */ +public interface AccessStatusService { + + /** + * Calculate the access status for an Item while considering the forever embargo date threshold. + * + * @param context the DSpace context + * @param item the item + * @return an access status value + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public String getAccessStatus(Context context, Item item) throws SQLException; + + /** + * Retrieve embargo information for the item. + * + * @param context the DSpace context + * @param item the item to check for embargo information + * @return an embargo date + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public String getEmbargoFromItem(Context context, Item item) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java index 80d69f3b661b..81250e9c8259 100644 --- a/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java +++ b/dspace-api/src/main/java/org/dspace/administer/CreateAdministrator.java @@ -14,6 +14,7 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Context; @@ -54,14 +55,14 @@ public final class CreateAdministrator { protected GroupService groupService; /** - * For invoking via the command line. If called with no command line arguments, + * For invoking via the command line. If called with no command line arguments, * it will negotiate with the user for the administrator details * * @param argv the command line arguments given * @throws Exception if error */ public static void main(String[] argv) - throws Exception { + throws Exception { CommandLineParser parser = new DefaultParser(); Options options = new Options(); @@ -69,19 +70,41 @@ public static void main(String[] argv) options.addOption("e", "email", true, "administrator email address"); options.addOption("f", "first", true, "administrator first name"); + options.addOption("h", "help", false, "explain create-administrator options"); options.addOption("l", "last", true, "administrator last name"); options.addOption("c", "language", true, "administrator language"); options.addOption("p", "password", true, "administrator password"); - CommandLine line = parser.parse(options, argv); + CommandLine line = null; + + try { + + line = parser.parse(options, argv); + + } catch (Exception e) { + + 
System.out.println(e.getMessage() + "\nTry \"dspace create-administrator -h\" to print help information."); + System.exit(1); + + } if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") && - line.hasOption("c") && line.hasOption("p")) { + line.hasOption("c") && line.hasOption("p")) { ca.createAdministrator(line.getOptionValue("e"), - line.getOptionValue("f"), line.getOptionValue("l"), - line.getOptionValue("c"), line.getOptionValue("p")); + line.getOptionValue("f"), line.getOptionValue("l"), + line.getOptionValue("c"), line.getOptionValue("p")); + } else if (line.hasOption("h")) { + String header = "\nA command-line tool for creating an initial administrator for setting up a" + + " DSpace site. Unless all the required parameters are passed it will" + + " prompt for an e-mail address, last name, first name and password from" + + " standard input.. An administrator group is then created and the data passed" + + " in used to create an e-person in that group.\n\n"; + String footer = "\n"; + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("dspace create-administrator", header, options, footer, true); + return; } else { - ca.negotiateAdministratorDetails(); + ca.negotiateAdministratorDetails(line); } } @@ -91,7 +114,7 @@ public static void main(String[] argv) * @throws Exception if error */ protected CreateAdministrator() - throws Exception { + throws Exception { context = new Context(); groupService = EPersonServiceFactory.getInstance().getGroupService(); ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); @@ -103,20 +126,20 @@ protected CreateAdministrator() * * @throws Exception if error */ - protected void negotiateAdministratorDetails() - throws Exception { + protected void negotiateAdministratorDetails(CommandLine line) + throws Exception { Console console = System.console(); System.out.println("Creating an initial administrator account"); - boolean dataOK = false; - - String email = null; - String 
firstName = null; - String lastName = null; - char[] password1 = null; - char[] password2 = null; + String email = line.getOptionValue('e'); + String firstName = line.getOptionValue('f'); + String lastName = line.getOptionValue('l'); String language = I18nUtil.getDefaultLocale().getLanguage(); + ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService(); + boolean flag = line.hasOption('p'); + char[] password = null; + boolean dataOK = line.hasOption('f') && line.hasOption('e') && line.hasOption('l'); while (!dataOK) { System.out.print("E-mail address: "); @@ -147,8 +170,6 @@ protected void negotiateAdministratorDetails() if (lastName != null) { lastName = lastName.trim(); } - - ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService(); if (cfg.hasProperty("webui.supported.locales")) { System.out.println("Select one of the following languages: " + cfg.getProperty("webui.supported.locales")); @@ -163,46 +184,59 @@ protected void negotiateAdministratorDetails() } } - System.out.println("Password will not display on screen."); - System.out.print("Password: "); + System.out.print("Is the above data correct? (y or n): "); System.out.flush(); - password1 = console.readPassword(); + String s = console.readLine(); - System.out.print("Again to confirm: "); - System.out.flush(); + if (s != null) { + s = s.trim(); + if (s.toLowerCase().startsWith("y")) { + dataOK = true; + } + } + + } + if (!flag) { + password = getPassword(console); + if (password == null) { + return; + } + } else { + password = line.getOptionValue("p").toCharArray(); + } + // if we make it to here, we are ready to create an administrator + createAdministrator(email, firstName, lastName, language, String.valueOf(password)); - password2 = console.readPassword(); + } - //TODO real password validation - if (password1.length > 1 && Arrays.equals(password1, password2)) { - // password OK - System.out.print("Is the above data correct? 
(y or n): "); - System.out.flush(); + private char[] getPassword(Console console) { + char[] password1 = null; + char[] password2 = null; + System.out.println("Password will not display on screen."); + System.out.print("Password: "); + System.out.flush(); - String s = console.readLine(); + password1 = console.readPassword(); - if (s != null) { - s = s.trim(); - if (s.toLowerCase().startsWith("y")) { - dataOK = true; - } - } - } else { - System.out.println("Passwords don't match"); - } - } + System.out.print("Again to confirm: "); + System.out.flush(); - // if we make it to here, we are ready to create an administrator - createAdministrator(email, firstName, lastName, language, String.valueOf(password1)); + password2 = console.readPassword(); - //Cleaning arrays that held password - Arrays.fill(password1, ' '); - Arrays.fill(password2, ' '); + // TODO real password validation + if (password1.length > 1 && Arrays.equals(password1, password2)) { + // password OK + Arrays.fill(password2, ' '); + return password1; + } else { + System.out.println("Passwords don't match"); + return null; + } } /** - * Create the administrator with the given details. If the user + * Create the administrator with the given details. 
If the user * already exists then they are simply upped to administrator status * * @param email the email for the user @@ -213,8 +247,8 @@ protected void negotiateAdministratorDetails() * @throws Exception if error */ protected void createAdministrator(String email, String first, String last, - String language, String pw) - throws Exception { + String language, String pw) + throws Exception { // Of course we aren't an administrator yet so we need to // circumvent authorisation context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java index 37a89fa6943a..2677cb20501f 100644 --- a/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/MetadataImporter.java @@ -11,13 +11,16 @@ import java.sql.SQLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.xpath.XPathAPI; import org.dspace.authorize.AuthorizeException; import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; @@ -81,7 +84,7 @@ private MetadataImporter() { } * @throws SQLException if database error * @throws IOException if IO error * @throws TransformerException if transformer error - * @throws ParserConfigurationException if config error + * @throws ParserConfigurationException if configuration error * @throws AuthorizeException if authorization error * @throws SAXException if parser error * @throws NonUniqueMetadataException if 
duplicate metadata @@ -90,8 +93,7 @@ private MetadataImporter() { } public static void main(String[] args) throws ParseException, SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException, SAXException, - NonUniqueMetadataException, RegistryImportException { - boolean forceUpdate = false; + NonUniqueMetadataException, RegistryImportException, XPathExpressionException { // create an options object and populate it CommandLineParser parser = new DefaultParser(); @@ -100,16 +102,14 @@ public static void main(String[] args) options.addOption("u", "update", false, "update an existing schema"); CommandLine line = parser.parse(options, args); - String file = null; if (line.hasOption('f')) { - file = line.getOptionValue('f'); + String file = line.getOptionValue('f'); + boolean forceUpdate = line.hasOption('u'); + loadRegistry(file, forceUpdate); } else { usage(); - System.exit(0); + System.exit(1); } - - forceUpdate = line.hasOption('u'); - loadRegistry(file, forceUpdate); } /** @@ -120,15 +120,15 @@ public static void main(String[] args) * @throws SQLException if database error * @throws IOException if IO error * @throws TransformerException if transformer error - * @throws ParserConfigurationException if config error + * @throws ParserConfigurationException if configuration error * @throws AuthorizeException if authorization error * @throws SAXException if parser error * @throws NonUniqueMetadataException if duplicate metadata * @throws RegistryImportException if import fails */ public static void loadRegistry(String file, boolean forceUpdate) - throws SQLException, IOException, TransformerException, ParserConfigurationException, - AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException { + throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException, + SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException { Context 
context = null; try { @@ -140,7 +140,9 @@ public static void loadRegistry(String file, boolean forceUpdate) Document document = RegistryImporter.loadXML(file); // Get the nodes corresponding to types - NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema") + .evaluate(document, XPathConstants.NODESET); // Add each one as a new format to the registry for (int i = 0; i < schemaNodes.getLength(); i++) { @@ -149,7 +151,8 @@ public static void loadRegistry(String file, boolean forceUpdate) } // Get the nodes corresponding to types - NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type"); + NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type") + .evaluate(document, XPathConstants.NODESET); // Add each one as a new format to the registry for (int i = 0; i < typeNodes.getLength(); i++) { @@ -181,8 +184,8 @@ public static void loadRegistry(String file, boolean forceUpdate) * @throws RegistryImportException if import fails */ private static void loadSchema(Context context, Node node, boolean updateExisting) - throws SQLException, IOException, TransformerException, - AuthorizeException, NonUniqueMetadataException, RegistryImportException { + throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException, + XPathExpressionException { // Get the values String name = RegistryImporter.getElementData(node, "name"); String namespace = RegistryImporter.getElementData(node, "namespace"); @@ -227,7 +230,7 @@ private static void loadSchema(Context context, Node node, boolean updateExistin /** * Process a node in the metadata registry XML file. The node must * be a "dc-type" node. If the type already exists, then it - * will not be reimported + * will not be re-imported. 
* * @param context DSpace context object * @param node the node in the DOM tree @@ -239,8 +242,8 @@ private static void loadSchema(Context context, Node node, boolean updateExistin * @throws RegistryImportException if import fails */ private static void loadType(Context context, Node node) - throws SQLException, IOException, TransformerException, - AuthorizeException, NonUniqueMetadataException, RegistryImportException { + throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException, + XPathExpressionException { // Get the values String schema = RegistryImporter.getElementData(node, "schema"); String element = RegistryImporter.getElementData(node, "element"); diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java new file mode 100644 index 000000000000..ee6b8d08b059 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleaner.java @@ -0,0 +1,140 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang.time.DateUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.ProcessStatus; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.Process; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ProcessService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +/** + * Script to cleanup the 
old processes in the specified state. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleaner extends DSpaceRunnable> { + + private ConfigurationService configurationService; + + private ProcessService processService; + + + private boolean cleanCompleted = false; + + private boolean cleanFailed = false; + + private boolean cleanRunning = false; + + private boolean help = false; + + private Integer days; + + + @Override + public void setup() throws ParseException { + + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.processService = ScriptServiceFactory.getInstance().getProcessService(); + + this.help = commandLine.hasOption('h'); + this.cleanFailed = commandLine.hasOption('f'); + this.cleanRunning = commandLine.hasOption('r'); + this.cleanCompleted = commandLine.hasOption('c') || (!cleanFailed && !cleanRunning); + + this.days = configurationService.getIntProperty("process-cleaner.days", 14); + + if (this.days <= 0) { + throw new IllegalStateException("The number of days must be a positive integer."); + } + + } + + @Override + public void internalRun() throws Exception { + + if (help) { + printHelp(); + return; + } + + Context context = new Context(); + + try { + context.turnOffAuthorisationSystem(); + performDeletion(context); + } finally { + context.restoreAuthSystemState(); + context.complete(); + } + + } + + /** + * Delete the processes based on the specified statuses and the configured days + * from their creation. 
+ */ + private void performDeletion(Context context) throws SQLException, IOException, AuthorizeException { + + List statuses = getProcessToDeleteStatuses(); + Date creationDate = calculateCreationDate(); + + handler.logInfo("Searching for processes with status: " + statuses); + List processes = processService.findByStatusAndCreationTimeOlderThan(context, statuses, creationDate); + handler.logInfo("Found " + processes.size() + " processes to be deleted"); + for (Process process : processes) { + processService.delete(context, process); + } + + handler.logInfo("Process cleanup completed"); + + } + + /** + * Returns the list of Process statuses do be deleted. + */ + private List getProcessToDeleteStatuses() { + List statuses = new ArrayList(); + if (cleanCompleted) { + statuses.add(ProcessStatus.COMPLETED); + } + if (cleanFailed) { + statuses.add(ProcessStatus.FAILED); + } + if (cleanRunning) { + statuses.add(ProcessStatus.RUNNING); + } + return statuses; + } + + private Date calculateCreationDate() { + return DateUtils.addDays(new Date(), -days); + } + + @Override + @SuppressWarnings("unchecked") + public ProcessCleanerConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("process-cleaner", ProcessCleanerConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java new file mode 100644 index 000000000000..292c6c372e4f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCli.java @@ -0,0 +1,18 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +/** + * The {@link ProcessCleaner} for CLI. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleanerCli extends ProcessCleaner { + +} diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java new file mode 100644 index 000000000000..043990156d16 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerCliConfiguration.java @@ -0,0 +1,18 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +/** + * The {@link ProcessCleanerConfiguration} for CLI. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleanerCliConfiguration extends ProcessCleanerConfiguration { + +} diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java new file mode 100644 index 000000000000..91dcfb5dfec5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -0,0 +1,53 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. 
+ */ +public class ProcessCleanerConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Options getOptions() { + if (options == null) { + + Options options = new Options(); + + options.addOption("h", "help", false, "help"); + + options.addOption("r", "running", false, "delete the process with RUNNING status"); + options.getOption("r").setType(boolean.class); + + options.addOption("f", "failed", false, "delete the process with FAILED status"); + options.getOption("f").setType(boolean.class); + + options.addOption("c", "completed", false, + "delete the process with COMPLETED status (default if no statuses are specified)"); + options.getOption("c").setType(boolean.class); + + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java b/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java index 5b5f70412ac2..27a653421312 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryImporter.java @@ -13,8 +13,11 @@ import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; -import org.apache.xpath.XPathAPI; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -72,9 +75,10 @@ public static Document loadXML(String filename) * @throws TransformerException if error */ public static String getElementData(Node parentElement, String childName) - throws 
TransformerException { + throws XPathExpressionException { // Grab the child node - Node childNode = XPathAPI.selectSingleNode(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE); if (childNode == null) { // No child node, so no values @@ -115,9 +119,10 @@ public static String getElementData(Node parentElement, String childName) * @throws TransformerException if error */ public static String[] getRepeatedElementData(Node parentElement, - String childName) throws TransformerException { + String childName) throws XPathExpressionException { // Grab the child node - NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET); String[] data = new String[childNodes.getLength()]; diff --git a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java index b2f72802529d..bbf320a0d5e5 100644 --- a/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java +++ b/dspace-api/src/main/java/org/dspace/administer/RegistryLoader.java @@ -16,15 +16,18 @@ import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.logging.log4j.Logger; -import org.apache.xpath.XPathAPI; import org.dspace.authorize.AuthorizeException; import org.dspace.content.BitstreamFormat; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamFormatService; import org.dspace.core.Context; -import org.dspace.core.LogManager; 
+import org.dspace.core.LogHelper; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -95,7 +98,7 @@ public static void main(String[] argv) throws Exception { System.exit(1); } catch (Exception e) { - log.fatal(LogManager.getHeader(context, "error_loading_registries", + log.fatal(LogHelper.getHeader(context, "error_loading_registries", ""), e); System.err.println("Error: \n - " + e.getMessage()); @@ -122,12 +125,13 @@ public static void main(String[] argv) throws Exception { */ public static void loadBitstreamFormats(Context context, String filename) throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + SAXException, TransformerException, AuthorizeException, XPathExpressionException { Document document = loadXML(filename); // Get the nodes corresponding to formats - NodeList typeNodes = XPathAPI.selectNodeList(document, - "dspace-bitstream-types/bitstream-type"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type") + .evaluate(document, XPathConstants.NODESET); // Add each one as a new format to the registry for (int i = 0; i < typeNodes.getLength(); i++) { @@ -135,7 +139,7 @@ public static void loadBitstreamFormats(Context context, String filename) loadFormat(context, n); } - log.info(LogManager.getHeader(context, "load_bitstream_formats", + log.info(LogHelper.getHeader(context, "load_bitstream_formats", "number_loaded=" + typeNodes.getLength())); } @@ -151,8 +155,7 @@ public static void loadBitstreamFormats(Context context, String filename) * @throws AuthorizeException if authorization error */ private static void loadFormat(Context context, Node node) - throws SQLException, IOException, TransformerException, - AuthorizeException { + throws SQLException, AuthorizeException, XPathExpressionException { // Get the values String mimeType = getElementData(node, "mimetype"); 
String shortDesc = getElementData(node, "short_description"); @@ -231,9 +234,10 @@ private static Document loadXML(String filename) throws IOException, * @throws TransformerException if transformer error */ private static String getElementData(Node parentElement, String childName) - throws TransformerException { + throws XPathExpressionException { // Grab the child node - Node childNode = XPathAPI.selectSingleNode(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE); if (childNode == null) { // No child node, so no values @@ -274,9 +278,10 @@ private static String getElementData(Node parentElement, String childName) * @throws TransformerException if transformer error */ private static String[] getRepeatedElementData(Node parentElement, - String childName) throws TransformerException { + String childName) throws XPathExpressionException { // Grab the child node - NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET); String[] data = new String[childNodes.getLength()]; diff --git a/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java b/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java index 89d9ffe5a841..13a1b3b5bbf8 100644 --- a/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java +++ b/dspace-api/src/main/java/org/dspace/administer/StructBuilder.java @@ -30,6 +30,10 @@ import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.cli.CommandLine; 
import org.apache.commons.cli.CommandLineParser; @@ -38,7 +42,7 @@ import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.xpath.XPathAPI; +import org.apache.commons.lang3.StringUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -52,9 +56,11 @@ import org.dspace.core.Context; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; -import org.jdom.Element; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -76,6 +82,7 @@ * * * } + * *

* It can be arbitrarily deep, and supports all the metadata elements * that make up the community and collection metadata. See the system @@ -104,12 +111,14 @@ public class StructBuilder { */ private static final Map communityMap = new HashMap<>(); - protected static CommunityService communityService + protected static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected static CollectionService collectionService + protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected static EPersonService ePersonService + protected static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected static final HandleService handleService + = HandleServiceFactory.getInstance().getHandleService(); /** * Default constructor @@ -135,16 +144,18 @@ private StructBuilder() { } * @throws SQLException passed through. * @throws FileNotFoundException if input or output could not be opened. * @throws TransformerException if the input document is invalid. + * @throws XPathExpressionException passed through. */ public static void main(String[] argv) - throws ParserConfigurationException, SQLException, - FileNotFoundException, IOException, TransformerException { + throws ParserConfigurationException, SQLException, + IOException, TransformerException, XPathExpressionException { // Define command line options. Options options = new Options(); options.addOption("h", "help", false, "Print this help message."); options.addOption("?", "help"); options.addOption("x", "export", false, "Export the current structure as XML."); + options.addOption("k", "keep-handles", false, "Apply Handles from input document."); options.addOption(Option.builder("e").longOpt("eperson") .desc("User who is manipulating the repository's structure.") @@ -206,6 +217,7 @@ public static void main(String[] argv) // Export? Import? 
if (line.hasOption('x')) { // export exportStructure(context, outputStream); + outputStream.close(); } else { // Must be import String input = line.getOptionValue('f'); if (null == input) { @@ -220,7 +232,12 @@ public static void main(String[] argv) inputStream = new FileInputStream(input); } - importStructure(context, inputStream, outputStream); + boolean keepHandles = options.hasOption("k"); + importStructure(context, inputStream, outputStream, keepHandles); + + inputStream.close(); + outputStream.close(); + // save changes from import context.complete(); } @@ -233,14 +250,17 @@ public static void main(String[] argv) * @param context * @param input XML which describes the new communities and collections. * @param output input, annotated with the new objects' identifiers. + * @param keepHandles true if Handles should be set from input. * @throws IOException * @throws ParserConfigurationException * @throws SAXException * @throws TransformerException * @throws SQLException */ - static void importStructure(Context context, InputStream input, OutputStream output) - throws IOException, ParserConfigurationException, SQLException, TransformerException { + static void importStructure(Context context, InputStream input, + OutputStream output, boolean keepHandles) + throws IOException, ParserConfigurationException, SQLException, + TransformerException, XPathExpressionException { // load the XML Document document = null; @@ -258,15 +278,29 @@ static void importStructure(Context context, InputStream input, OutputStream out // is properly structured. try { validate(document); - } catch (TransformerException ex) { + } catch (XPathExpressionException ex) { System.err.format("The input document is invalid: %s%n", ex.getMessage()); System.exit(1); } // Check for 'identifier' attributes -- possibly output by this class. 
- NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]") + .evaluate(document, XPathConstants.NODESET); if (identifierNodes.getLength() > 0) { - System.err.println("The input document has 'identifier' attributes, which will be ignored."); + if (!keepHandles) { + System.err.println("The input document has 'identifier' attributes, which will be ignored."); + } else { + for (int i = 0; i < identifierNodes.getLength() ; i++) { + String identifier = identifierNodes.item(i).getAttributes().item(0).getTextContent(); + if (handleService.resolveToURL(context, identifier) != null) { + System.err.printf("The input document contains handle %s," + + " which is in use already. Aborting...%n", + identifier); + System.exit(1); + } + } + } } // load the mappings into the member variable hashmaps @@ -287,10 +321,11 @@ static void importStructure(Context context, InputStream input, OutputStream out Element[] elements = new Element[]{}; try { // get the top level community list - NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community"); + NodeList first = (NodeList) xPath.compile("/import_structure/community") + .evaluate(document, XPathConstants.NODESET); // run the import starting with the top level communities - elements = handleCommunities(context, first, null); + elements = handleCommunities(context, first, null, keepHandles); } catch (TransformerException ex) { System.err.format("Input content not understood: %s%n", ex.getMessage()); System.exit(1); @@ -307,7 +342,7 @@ static void importStructure(Context context, InputStream input, OutputStream out } // finally write the string into the output file. 
- final org.jdom.Document xmlOutput = new org.jdom.Document(root); + final org.jdom2.Document xmlOutput = new org.jdom2.Document(root); try { new XMLOutputter().output(xmlOutput, output); } catch (IOException e) { @@ -411,7 +446,7 @@ static void exportStructure(Context context, OutputStream output) { } // Now write the structure out. - org.jdom.Document xmlOutput = new org.jdom.Document(rootElement); + org.jdom2.Document xmlOutput = new org.jdom2.Document(rootElement); try { XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat()); outputter.output(xmlOutput, output); @@ -456,14 +491,16 @@ private static void giveHelp(Options options) { * @throws TransformerException if transformer error */ private static void validate(org.w3c.dom.Document document) - throws TransformerException { + throws XPathExpressionException { StringBuilder err = new StringBuilder(); boolean trip = false; err.append("The following errors were encountered parsing the source XML.\n"); err.append("No changes have been made to the DSpace instance.\n\n"); - NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList first = (NodeList) xPath.compile("/import_structure/community") + .evaluate(document, XPathConstants.NODESET); if (first.getLength() == 0) { err.append("-There are no top level communities in the source document."); System.out.println(err.toString()); @@ -493,14 +530,15 @@ private static void validate(org.w3c.dom.Document document) * no errors. 
*/ private static String validateCommunities(NodeList communities, int level) - throws TransformerException { + throws XPathExpressionException { StringBuilder err = new StringBuilder(); boolean trip = false; String errs = null; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < communities.getLength(); i++) { Node n = communities.item(i); - NodeList name = XPathAPI.selectNodeList(n, "name"); + NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET); if (name.getLength() != 1) { String pos = Integer.toString(i + 1); err.append("-The level ").append(level) @@ -510,7 +548,7 @@ private static String validateCommunities(NodeList communities, int level) } // validate sub communities - NodeList subCommunities = XPathAPI.selectNodeList(n, "community"); + NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(n, XPathConstants.NODESET); String comErrs = validateCommunities(subCommunities, level + 1); if (comErrs != null) { err.append(comErrs); @@ -518,7 +556,7 @@ private static String validateCommunities(NodeList communities, int level) } // validate collections - NodeList collections = XPathAPI.selectNodeList(n, "collection"); + NodeList collections = (NodeList) xPath.compile("collection").evaluate(n, XPathConstants.NODESET); String colErrs = validateCollections(collections, level + 1); if (colErrs != null) { err.append(colErrs); @@ -542,14 +580,15 @@ private static String validateCommunities(NodeList communities, int level) * @return the errors to be generated by the calling method, or null if none */ private static String validateCollections(NodeList collections, int level) - throws TransformerException { + throws XPathExpressionException { StringBuilder err = new StringBuilder(); boolean trip = false; String errs = null; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < collections.getLength(); i++) { Node n = collections.item(i); - NodeList name = XPathAPI.selectNodeList(n, 
"name"); + NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET); if (name.getLength() != 1) { String pos = Integer.toString(i + 1); err.append("-The level ").append(level) @@ -609,22 +648,29 @@ private static String getStringValue(Node node) { * @param context the context of the request * @param communities a nodelist of communities to create along with their sub-structures * @param parent the parent community of the nodelist of communities to create + * @param keepHandles use Handles from input. * @return an element array containing additional information regarding the * created communities (e.g. the handles they have been assigned) */ - private static Element[] handleCommunities(Context context, NodeList communities, Community parent) - throws TransformerException, SQLException, AuthorizeException { + private static Element[] handleCommunities(Context context, NodeList communities, + Community parent, boolean keepHandles) + throws TransformerException, SQLException, AuthorizeException, + XPathExpressionException { Element[] elements = new Element[communities.getLength()]; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < communities.getLength(); i++) { - Community community; - Element element = new Element("community"); + Node tn = communities.item(i); + Node identifier = tn.getAttributes().getNamedItem("identifier"); // create the community or sub community - if (parent != null) { + Community community; + if (null == identifier + || StringUtils.isBlank(identifier.getNodeValue()) + || !keepHandles) { community = communityService.create(parent, context); } else { - community = communityService.create(null, context); + community = communityService.create(parent, context, identifier.getNodeValue()); } // default the short description to be an empty string @@ -632,9 +678,8 @@ private static Element[] handleCommunities(Context context, NodeList communities MD_SHORT_DESCRIPTION, null, " "); // now update the 
metadata - Node tn = communities.item(i); for (Map.Entry entry : communityMap.entrySet()) { - NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey()); + NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET); if (nl.getLength() == 1) { communityService.setMetadataSingleValue(context, community, entry.getValue(), null, getStringValue(nl.item(0))); @@ -658,6 +703,7 @@ private static Element[] handleCommunities(Context context, NodeList communities // but it's here to keep it separate from the create process in // case // we want to move it or make it switchable later + Element element = new Element("community"); element.setAttribute("identifier", community.getHandle()); Element nameElement = new Element("name"); @@ -700,12 +746,16 @@ private static Element[] handleCommunities(Context context, NodeList communities } // handle sub communities - NodeList subCommunities = XPathAPI.selectNodeList(tn, "community"); - Element[] subCommunityElements = handleCommunities(context, subCommunities, community); + NodeList subCommunities = (NodeList) xPath.compile("community") + .evaluate(tn, XPathConstants.NODESET); + Element[] subCommunityElements = handleCommunities(context, + subCommunities, community, keepHandles); // handle collections - NodeList collections = XPathAPI.selectNodeList(tn, "collection"); - Element[] collectionElements = handleCollections(context, collections, community); + NodeList collections = (NodeList) xPath.compile("collection") + .evaluate(tn, XPathConstants.NODESET); + Element[] collectionElements = handleCollections(context, + collections, community, keepHandles); int j; for (j = 0; j < subCommunityElements.length; j++) { @@ -730,22 +780,33 @@ private static Element[] handleCommunities(Context context, NodeList communities * @return an Element array containing additional information about the * created collections (e.g. 
the handle) */ - private static Element[] handleCollections(Context context, NodeList collections, Community parent) - throws TransformerException, SQLException, AuthorizeException { + private static Element[] handleCollections(Context context, + NodeList collections, Community parent, boolean keepHandles) + throws SQLException, AuthorizeException, XPathExpressionException { Element[] elements = new Element[collections.getLength()]; + XPath xPath = XPathFactory.newInstance().newXPath(); for (int i = 0; i < collections.getLength(); i++) { - Element element = new Element("collection"); - Collection collection = collectionService.create(context, parent); + Node tn = collections.item(i); + Node identifier = tn.getAttributes().getNamedItem("identifier"); + + // Create the Collection. + Collection collection; + if (null == identifier + || StringUtils.isBlank(identifier.getNodeValue()) + || !keepHandles) { + collection = collectionService.create(context, parent); + } else { + collection = collectionService.create(context, parent, identifier.getNodeValue()); + } // default the short description to the empty string collectionService.setMetadataSingleValue(context, collection, MD_SHORT_DESCRIPTION, Item.ANY, " "); // import the rest of the metadata - Node tn = collections.item(i); for (Map.Entry entry : collectionMap.entrySet()) { - NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey()); + NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET); if (nl.getLength() == 1) { collectionService.setMetadataSingleValue(context, collection, entry.getValue(), null, getStringValue(nl.item(0))); @@ -754,6 +815,7 @@ private static Element[] handleCollections(Context context, NodeList collections collectionService.update(context, collection); + Element element = new Element("collection"); element.setAttribute("identifier", collection.getHandle()); Element nameElement = new Element("name"); diff --git 
a/dspace-api/src/main/java/org/dspace/alerts/AllowSessionsEnum.java b/dspace-api/src/main/java/org/dspace/alerts/AllowSessionsEnum.java new file mode 100644 index 000000000000..a200cab8781f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/alerts/AllowSessionsEnum.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts; + +/** + * Enum representing the options for allowing sessions: + * ALLOW_ALL_SESSIONS - Will allow all users to log in and continue their sessions + * ALLOW_CURRENT_SESSIONS_ONLY - Will prevent non admin users from logging in, however logged-in users + * will remain logged in + * ALLOW_ADMIN_SESSIONS_ONLY - Only admin users can log in, non admin sessions will be interrupted + * + * NOTE: This functionality can be stored in the database, but no support is present right now to interrupt and prevent + * sessions. 
+ */ +public enum AllowSessionsEnum { + ALLOW_ALL_SESSIONS("all"), + ALLOW_CURRENT_SESSIONS_ONLY("current"), + ALLOW_ADMIN_SESSIONS_ONLY("admin"); + + private String allowSessionsType; + + AllowSessionsEnum(String allowSessionsType) { + this.allowSessionsType = allowSessionsType; + } + + public String getValue() { + return allowSessionsType; + } + + public static AllowSessionsEnum fromString(String alertAllowSessionType) { + if (alertAllowSessionType == null) { + return AllowSessionsEnum.ALLOW_ALL_SESSIONS; + } + + switch (alertAllowSessionType) { + case "all": + return AllowSessionsEnum.ALLOW_ALL_SESSIONS; + case "current": + return AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY; + case "admin" : + return AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY; + default: + throw new IllegalArgumentException("No corresponding enum value for provided string: " + + alertAllowSessionType); + } + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/alerts/SystemWideAlert.java b/dspace-api/src/main/java/org/dspace/alerts/SystemWideAlert.java new file mode 100644 index 000000000000..f56cbdcce9e9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/alerts/SystemWideAlert.java @@ -0,0 +1,179 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts; + +import java.util.Date; +import javax.persistence.Cacheable; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; +import 
org.dspace.core.ReloadableEntity; +import org.hibernate.annotations.CacheConcurrencyStrategy; + +/** + * Database object representing system-wide alerts + */ +@Entity +@Cacheable +@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy") +@Table(name = "systemwidealert") +public class SystemWideAlert implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "alert_id_seq") + @SequenceGenerator(name = "alert_id_seq", sequenceName = "alert_id_seq", allocationSize = 1) + @Column(name = "alert_id", unique = true, nullable = false) + private Integer alertId; + + @Column(name = "message", nullable = false) + private String message; + + @Column(name = "allow_sessions") + private String allowSessions; + + @Column(name = "countdown_to") + @Temporal(TemporalType.TIMESTAMP) + private Date countdownTo; + + @Column(name = "active") + private boolean active; + + protected SystemWideAlert() { + } + + /** + * This method returns the ID that the system-wide alert holds within the database + * + * @return The ID that the system-wide alert holds within the database + */ + @Override + public Integer getID() { + return alertId; + } + + /** + * Set the ID for the system-wide alert + * + * @param alertID The ID to set + */ + public void setID(final Integer alertID) { + this.alertId = alertID; + } + + /** + * Retrieve the message of the system-wide alert + * + * @return the message of the system-wide alert + */ + public String getMessage() { + return message; + } + + /** + * Set the message of the system-wide alert + * + * @param message The message to set + */ + public void setMessage(final String message) { + this.message = message; + } + + /** + * Retrieve what kind of sessions are allowed while the system-wide alert is active + * + * @return what kind of sessions are allowed while the system-wide alert is active + */ + public AllowSessionsEnum getAllowSessions() { + return 
AllowSessionsEnum.fromString(allowSessions); + } + + /** + * Set what kind of sessions are allowed while the system-wide alert is active + * + * @param allowSessions Integer representing what kind of sessions are allowed + */ + public void setAllowSessions(AllowSessionsEnum allowSessions) { + this.allowSessions = allowSessions.getValue(); + } + + /** + * Retrieve the date to which will be count down when the system-wide alert is active + * + * @return the date to which will be count down when the system-wide alert is active + */ + public Date getCountdownTo() { + return countdownTo; + } + + /** + * Set the date to which will be count down when the system-wide alert is active + * + * @param countdownTo The date to which will be count down + */ + public void setCountdownTo(final Date countdownTo) { + this.countdownTo = countdownTo; + } + + /** + * Retrieve whether the system-wide alert is active + * + * @return whether the system-wide alert is active + */ + public boolean isActive() { + return active; + } + + /** + * Set whether the system-wide alert is active + * + * @param active Whether the system-wide alert is active + */ + public void setActive(final boolean active) { + this.active = active; + } + + /** + * Return true if other is the same SystemWideAlert + * as this object, false otherwise + * + * @param other object to compare to + * @return true if object passed in represents the same + * system-wide alert as this object + */ + @Override + public boolean equals(Object other) { + return (other instanceof SystemWideAlert && + new EqualsBuilder().append(this.getID(), ((SystemWideAlert) other).getID()) + .append(this.getMessage(), ((SystemWideAlert) other).getMessage()) + .append(this.getAllowSessions(), ((SystemWideAlert) other).getAllowSessions()) + .append(this.getCountdownTo(), ((SystemWideAlert) other).getCountdownTo()) + .append(this.isActive(), ((SystemWideAlert) other).isActive()) + .isEquals()); + } + + @Override + public int hashCode() { + return new 
HashCodeBuilder(17, 37) + .append(this.getID()) + .append(this.getMessage()) + .append(this.getAllowSessions()) + .append(this.getCountdownTo()) + .append(this.isActive()) + .toHashCode(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/alerts/SystemWideAlertServiceImpl.java b/dspace-api/src/main/java/org/dspace/alerts/SystemWideAlertServiceImpl.java new file mode 100644 index 000000000000..9ddf6c97d111 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/alerts/SystemWideAlertServiceImpl.java @@ -0,0 +1,129 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Date; +import java.util.List; + +import org.apache.logging.log4j.Logger; +import org.dspace.alerts.dao.SystemWideAlertDAO; +import org.dspace.alerts.service.SystemWideAlertService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.core.LogHelper; +import org.dspace.eperson.EPerson; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * The implementation for the {@link SystemWideAlertService} class + */ +public class SystemWideAlertServiceImpl implements SystemWideAlertService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SystemWideAlertService.class); + + + @Autowired + private SystemWideAlertDAO systemWideAlertDAO; + + @Autowired + private AuthorizeService authorizeService; + + @Override + public SystemWideAlert create(final Context context, final String message, + final AllowSessionsEnum allowSessionsType, + final Date countdownTo, final boolean active) throws SQLException, + AuthorizeException { + if (!authorizeService.isAdmin(context)) { + throw new 
AuthorizeException( + "Only administrators can create a system-wide alert"); + } + SystemWideAlert systemWideAlert = new SystemWideAlert(); + systemWideAlert.setMessage(message); + systemWideAlert.setAllowSessions(allowSessionsType); + systemWideAlert.setCountdownTo(countdownTo); + systemWideAlert.setActive(active); + + SystemWideAlert createdAlert = systemWideAlertDAO.create(context, systemWideAlert); + log.info(LogHelper.getHeader(context, "system_wide_alert_create", + "System Wide Alert has been created with message: '" + message + "' and ID " + + createdAlert.getID() + " and allowSessionsType " + allowSessionsType + + " and active set to " + active)); + + + return createdAlert; + } + + @Override + public SystemWideAlert find(final Context context, final int alertId) throws SQLException { + return systemWideAlertDAO.findByID(context, SystemWideAlert.class, alertId); + } + + @Override + public List findAll(final Context context) throws SQLException { + return systemWideAlertDAO.findAll(context, SystemWideAlert.class); + } + + @Override + public List findAll(final Context context, final int limit, final int offset) throws SQLException { + return systemWideAlertDAO.findAll(context, limit, offset); + } + + @Override + public List findAllActive(final Context context, final int limit, final int offset) + throws SQLException { + return systemWideAlertDAO.findAllActive(context, limit, offset); + } + + @Override + public void delete(final Context context, final SystemWideAlert systemWideAlert) + throws SQLException, IOException, AuthorizeException { + if (!authorizeService.isAdmin(context)) { + throw new AuthorizeException( + "Only administrators can create a system-wide alert"); + } + systemWideAlertDAO.delete(context, systemWideAlert); + log.info(LogHelper.getHeader(context, "system_wide_alert_create", + "System Wide Alert with ID " + systemWideAlert.getID() + " has been deleted")); + + } + + @Override + public void update(final Context context, final SystemWideAlert 
systemWideAlert) + throws SQLException, AuthorizeException { + if (!authorizeService.isAdmin(context)) { + throw new AuthorizeException( + "Only administrators can create a system-wide alert"); + } + systemWideAlertDAO.save(context, systemWideAlert); + + } + + @Override + public boolean canNonAdminUserLogin(Context context) throws SQLException { + List active = findAllActive(context, 1, 0); + if (active == null || active.isEmpty()) { + return true; + } + return active.get(0).getAllowSessions() == AllowSessionsEnum.ALLOW_ALL_SESSIONS; + } + + @Override + public boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException { + if (authorizeService.isAdmin(context, ePerson)) { + return true; + } + List active = findAllActive(context, 1, 0); + if (active == null || active.isEmpty()) { + return true; + } + return active.get(0).getAllowSessions() != AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY; + } +} diff --git a/dspace-api/src/main/java/org/dspace/alerts/dao/SystemWideAlertDAO.java b/dspace-api/src/main/java/org/dspace/alerts/dao/SystemWideAlertDAO.java new file mode 100644 index 000000000000..b26b64758355 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/alerts/dao/SystemWideAlertDAO.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts.dao; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.alerts.SystemWideAlert; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; + +/** + * This is the Data Access Object for the {@link SystemWideAlert} object + */ +public interface SystemWideAlertDAO extends GenericDAO { + + /** + * Returns a list of all SystemWideAlert objects in the database + * + * @param context The relevant DSpace context + * @param limit The limit for the amount of 
SystemWideAlerts returned + * @param offset The offset for the Processes to be returned + * @return The list of all SystemWideAlert objects in the Database + * @throws SQLException If something goes wrong + */ + List findAll(Context context, int limit, int offset) throws SQLException; + + /** + * Returns a list of all active SystemWideAlert objects in the database + * + * @param context The relevant DSpace context + * @param limit The limit for the amount of SystemWideAlerts returned + * @param offset The offset for the Processes to be returned + * @return The list of all SystemWideAlert objects in the Database + * @throws SQLException If something goes wrong + */ + List findAllActive(Context context, int limit, int offset) throws SQLException; + + +} diff --git a/dspace-api/src/main/java/org/dspace/alerts/dao/impl/SystemWideAlertDAOImpl.java b/dspace-api/src/main/java/org/dspace/alerts/dao/impl/SystemWideAlertDAOImpl.java new file mode 100644 index 000000000000..13a0e0af236a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/alerts/dao/impl/SystemWideAlertDAOImpl.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts.dao.impl; + +import java.sql.SQLException; +import java.util.List; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Root; + +import org.dspace.alerts.SystemWideAlert; +import org.dspace.alerts.SystemWideAlert_; +import org.dspace.alerts.dao.SystemWideAlertDAO; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; + +/** + * Implementation class for the {@link SystemWideAlertDAO} + */ +public class SystemWideAlertDAOImpl extends AbstractHibernateDAO implements SystemWideAlertDAO { + + public List findAll(final Context 
context, final int limit, final int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class); + Root alertRoot = criteriaQuery.from(SystemWideAlert.class); + criteriaQuery.select(alertRoot); + + return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset); + } + + public List findAllActive(final Context context, final int limit, final int offset) + throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SystemWideAlert.class); + Root alertRoot = criteriaQuery.from(SystemWideAlert.class); + criteriaQuery.select(alertRoot); + criteriaQuery.where(criteriaBuilder.equal(alertRoot.get(SystemWideAlert_.active), true)); + + return list(context, criteriaQuery, false, SystemWideAlert.class, limit, offset); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/alerts/service/SystemWideAlertService.java b/dspace-api/src/main/java/org/dspace/alerts/service/SystemWideAlertService.java new file mode 100644 index 000000000000..cf231308849d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/alerts/service/SystemWideAlertService.java @@ -0,0 +1,118 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts.service; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Date; +import java.util.List; + +import org.dspace.alerts.AllowSessionsEnum; +import org.dspace.alerts.SystemWideAlert; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; + +/** + * An interface for the SystemWideAlertService with methods regarding the SystemWideAlert workload + */ 
+public interface SystemWideAlertService { + + /** + * This method will create a SystemWideAlert object in the database + * + * @param context The relevant DSpace context + * @param message The message of the system-wide alert + * @param allowSessionsType Which sessions need to be allowed for the system-wide alert + * @param countdownTo The date to which to count down to when the system-wide alert is active + * @param active Whether the system-wide alert is active + * @return The created SystemWideAlert object + * @throws SQLException If something goes wrong + */ + SystemWideAlert create(Context context, String message, AllowSessionsEnum allowSessionsType, + Date countdownTo, boolean active + ) throws SQLException, AuthorizeException; + + /** + * This method will retrieve a SystemWideAlert object from the Database with the given ID + * + * @param context The relevant DSpace context + * @param alertId The alert id on which we'll search for in the database + * @return The system-wide alert that holds the given alert id + * @throws SQLException If something goes wrong + */ + SystemWideAlert find(Context context, int alertId) throws SQLException; + + /** + * Returns a list of all SystemWideAlert objects in the database + * + * @param context The relevant DSpace context + * @return The list of all SystemWideAlert objects in the Database + * @throws SQLException If something goes wrong + */ + List findAll(Context context) throws SQLException; + + /** + * Returns a list of all SystemWideAlert objects in the database + * + * @param context The relevant DSpace context + * @param limit The limit for the amount of system-wide alerts returned + * @param offset The offset for the system-wide alerts to be returned + * @return The list of all SystemWideAlert objects in the Database + * @throws SQLException If something goes wrong + */ + List findAll(Context context, int limit, int offset) throws SQLException; + + + /** + * Returns a list of all active SystemWideAlert objects in 
the database + * + * @param context The relevant DSpace context + * @return The list of all active SystemWideAlert objects in the database + * @throws SQLException If something goes wrong + */ + List findAllActive(Context context, int limit, int offset) throws SQLException; + + /** + * This method will delete the given SystemWideAlert object from the database + * + * @param context The relevant DSpace context + * @param systemWideAlert The SystemWideAlert object to be deleted + * @throws SQLException If something goes wrong + */ + void delete(Context context, SystemWideAlert systemWideAlert) + throws SQLException, IOException, AuthorizeException; + + + /** + * This method will be used to update the given SystemWideAlert object in the database + * + * @param context The relevant DSpace context + * @param systemWideAlert The SystemWideAlert object to be updated + * @throws SQLException If something goes wrong + */ + void update(Context context, SystemWideAlert systemWideAlert) throws SQLException, AuthorizeException; + + + /** + * Verifies if the user connected to the current context can retain its session + * + * @param context The relevant DSpace context + * @return if the user connected to the current context can retain its session + */ + boolean canUserMaintainSession(Context context, EPerson ePerson) throws SQLException; + + + /** + * Verifies if a non admin user can log in + * + * @param context The relevant DSpace context + * @return if a non admin user can log in + */ + boolean canNonAdminUserLogin(Context context) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java new file mode 100644 index 000000000000..7bef232f0450 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControl.java @@ -0,0 +1,689 @@ +/** + * The contents of this file are subject to the license and copyright + 
* detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.apache.commons.collections4.CollectionUtils.isEmpty; +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.TimeZone; +import java.util.UUID; +import java.util.function.Function; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.bulkaccesscontrol.exception.BulkAccessControlException; +import org.dspace.app.bulkaccesscontrol.model.AccessCondition; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionBitstream; +import org.dspace.app.bulkaccesscontrol.model.AccessConditionItem; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessControlInput; +import org.dspace.app.bulkaccesscontrol.service.BulkAccessConditionConfigurationService; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.authorize.AuthorizeException; +import 
org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.model.AccessConditionOption; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link DSpaceRunnable} to perform a bulk access control via json file. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControl extends DSpaceRunnable> { + + private DSpaceObjectUtils dSpaceObjectUtils; + + private SearchService searchService; + + private ItemService itemService; + + private String filename; + + private List uuids; + + private Context context; + + private BulkAccessConditionConfigurationService bulkAccessConditionConfigurationService; + + private ResourcePolicyService resourcePolicyService; + + protected EPersonService epersonService; + + private ConfigurationService configurationService; + + private MediaFilterService mediaFilterService; + + private Map itemAccessConditions; + + private Map uploadAccessConditions; + + private final String ADD_MODE = "add"; + + private final String REPLACE_MODE = "replace"; + + private boolean help = false; + + protected String eperson = null; + + @Override + @SuppressWarnings("unchecked") + public void setup() throws ParseException { + + this.searchService = SearchUtils.getSearchService(); + this.itemService = ContentServiceFactory.getInstance().getItemService(); + this.resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + this.epersonService = EPersonServiceFactory.getInstance().getEPersonService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + mediaFilterService.setLogHandler(handler); + this.bulkAccessConditionConfigurationService = new DSpace().getServiceManager().getServiceByName( + "bulkAccessConditionConfigurationService", BulkAccessConditionConfigurationService.class); + this.dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + BulkAccessConditionConfiguration bulkAccessConditionConfiguration = + 
bulkAccessConditionConfigurationService.getBulkAccessConditionConfiguration("default"); + + itemAccessConditions = bulkAccessConditionConfiguration + .getItemAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + uploadAccessConditions = bulkAccessConditionConfiguration + .getBitstreamAccessConditionOptions() + .stream() + .collect(Collectors.toMap(AccessConditionOption::getName, Function.identity())); + + help = commandLine.hasOption('h'); + filename = commandLine.getOptionValue('f'); + uuids = commandLine.hasOption('u') ? Arrays.asList(commandLine.getOptionValues('u')) : null; + } + + @Override + public void internalRun() throws Exception { + + if (help) { + printHelp(); + return; + } + + ObjectMapper mapper = new ObjectMapper(); + mapper.setTimeZone(TimeZone.getTimeZone("UTC")); + BulkAccessControlInput accessControl; + context = new Context(Context.Mode.BATCH_EDIT); + setEPerson(context); + + if (!isAuthorized(context)) { + handler.logError("Current user is not eligible to execute script bulk-access-control"); + throw new AuthorizeException("Current user is not eligible to execute script bulk-access-control"); + } + + if (uuids == null || uuids.size() == 0) { + handler.logError("A target uuid must be provided with at least on uuid (run with -h flag for details)"); + throw new IllegalArgumentException("At least one target uuid must be provided"); + } + + InputStream inputStream = handler.getFileStream(context, filename) + .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + + "found for filename: " + filename)); + + try { + accessControl = mapper.readValue(inputStream, BulkAccessControlInput.class); + } catch (IOException e) { + handler.logError("Error parsing json file " + e.getMessage()); + throw new IllegalArgumentException("Error parsing json file", e); + } + try { + validate(accessControl); + updateItemsAndBitstreamsPolices(accessControl); + 
context.complete(); + } catch (Exception e) { + handler.handleException(e); + context.abort(); + } + } + + /** + * check the validation of mapped json data, it must + * provide item or bitstream information or both of them + * and check the validation of item node if provided, + * and check the validation of bitstream node if provided. + * + * @param accessControl mapped json data + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if accessControl is invalid + */ + private void validate(BulkAccessControlInput accessControl) throws SQLException { + + AccessConditionItem item = accessControl.getItem(); + AccessConditionBitstream bitstream = accessControl.getBitstream(); + + if (Objects.isNull(item) && Objects.isNull(bitstream)) { + handler.logError("item or bitstream node must be provided"); + throw new BulkAccessControlException("item or bitstream node must be provided"); + } + + if (Objects.nonNull(item)) { + validateItemNode(item); + } + + if (Objects.nonNull(bitstream)) { + validateBitstreamNode(bitstream); + } + } + + /** + * check the validation of item node, the item mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information + * of accessCondition must be provided, + * also checking that accessConditions information are valid. 
+ * + * @param item the item node + * @throws BulkAccessControlException if item node is invalid + */ + private void validateItemNode(AccessConditionItem item) { + String mode = item.getMode(); + List accessConditions = item.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("item mode node must be provided"); + throw new BulkAccessControlException("item mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for item mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for item mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logError("accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of item must be provided with mode<" + ADD_MODE + ">"); + } + + for (AccessCondition accessCondition : accessConditions) { + validateAccessCondition(accessCondition); + } + } + + /** + * check the validation of bitstream node, the bitstream mode + * must be provided with value 'add' or 'replace' + * if mode equals to add so the information of accessConditions + * must be provided, + * also checking that constraint information is valid, + * also checking that accessConditions information are valid. 
+ * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if bitstream node is invalid + */ + private void validateBitstreamNode(AccessConditionBitstream bitstream) throws SQLException { + String mode = bitstream.getMode(); + List accessConditions = bitstream.getAccessConditions(); + + if (StringUtils.isEmpty(mode)) { + handler.logError("bitstream mode node must be provided"); + throw new BulkAccessControlException("bitstream mode node must be provided"); + } else if (!(StringUtils.equalsAny(mode, ADD_MODE, REPLACE_MODE))) { + handler.logError("wrong value for bitstream mode<" + mode + ">"); + throw new BulkAccessControlException("wrong value for bitstream mode<" + mode + ">"); + } else if (ADD_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logError("accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + throw new BulkAccessControlException( + "accessConditions of bitstream must be provided with mode<" + ADD_MODE + ">"); + } + + validateConstraint(bitstream); + + for (AccessCondition accessCondition : bitstream.getAccessConditions()) { + validateAccessCondition(accessCondition); + } + } + + /** + * check the validation of constraint node if provided, + * constraint isn't supported when multiple uuids are provided + * or when uuid isn't an Item + * + * @param bitstream the bitstream node + * @throws SQLException if something goes wrong in the database + * @throws BulkAccessControlException if constraint node is invalid + */ + private void validateConstraint(AccessConditionBitstream bitstream) throws SQLException { + if (uuids.size() > 1 && containsConstraints(bitstream)) { + handler.logError("constraint isn't supported when multiple uuids are provided"); + throw new BulkAccessControlException("constraint isn't supported when multiple uuids are provided"); + } else if (uuids.size() == 1 && containsConstraints(bitstream)) { + DSpaceObject dso 
= + dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(0))); + + if (Objects.nonNull(dso) && dso.getType() != Constants.ITEM) { + handler.logError("constraint is not supported when uuid isn't an Item"); + throw new BulkAccessControlException("constraint is not supported when uuid isn't an Item"); + } + } + } + + /** + * check the validation of access condition, + * the access condition name must equal to one of configured access conditions, + * then call {@link AccessConditionOption#validateResourcePolicy( + * Context, String, Date, Date)} if exception happens so, it's invalid. + * + * @param accessCondition the accessCondition + * @throws BulkAccessControlException if the accessCondition is invalid + */ + private void validateAccessCondition(AccessCondition accessCondition) { + + if (!itemAccessConditions.containsKey(accessCondition.getName())) { + handler.logError("wrong access condition <" + accessCondition.getName() + ">"); + throw new BulkAccessControlException("wrong access condition <" + accessCondition.getName() + ">"); + } + + try { + itemAccessConditions.get(accessCondition.getName()).validateResourcePolicy( + context, accessCondition.getName(), accessCondition.getStartDate(), accessCondition.getEndDate()); + } catch (Exception e) { + handler.logError("invalid access condition, " + e.getMessage()); + handler.handleException(e); + } + } + + /** + * find all items of provided {@link #uuids} from solr, + * then update the resource policies of items + * or bitstreams of items (only bitstreams of ORIGINAL bundles) + * and derivative bitstreams, or both of them. 
+ * + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws SearchServiceException if a search error occurs + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemsAndBitstreamsPolices(BulkAccessControlInput accessControl) + throws SQLException, SearchServiceException, AuthorizeException { + + int counter = 0; + int start = 0; + int limit = 20; + + String query = buildSolrQuery(uuids); + + Iterator itemIterator = findItems(query, start, limit); + + while (itemIterator.hasNext()) { + + Item item = context.reloadEntity(itemIterator.next()); + + if (Objects.nonNull(accessControl.getItem())) { + updateItemPolicies(item, accessControl); + } + + if (Objects.nonNull(accessControl.getBitstream())) { + updateBitstreamsPolicies(item, accessControl); + } + + context.commit(); + context.uncacheEntity(item); + counter++; + + if (counter == limit) { + counter = 0; + start += limit; + itemIterator = findItems(query, start, limit); + } + } + } + + private String buildSolrQuery(List uuids) throws SQLException { + String [] query = new String[uuids.size()]; + + for (int i = 0 ; i < query.length ; i++) { + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(uuids.get(i))); + + if (dso.getType() == Constants.COMMUNITY) { + query[i] = "location.comm:" + dso.getID(); + } else if (dso.getType() == Constants.COLLECTION) { + query[i] = "location.coll:" + dso.getID(); + } else if (dso.getType() == Constants.ITEM) { + query[i] = "search.resourceid:" + dso.getID(); + } + } + return StringUtils.joinWith(" OR ", query); + } + + private Iterator findItems(String query, int start, int limit) + throws SearchServiceException { + + DiscoverQuery discoverQuery = buildDiscoveryQuery(query, start, limit); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + .stream() + .map(indexableObject -> + ((IndexableItem) 
indexableObject).getIndexedObject()) + .collect(Collectors.toList()) + .iterator(); + } + + private DiscoverQuery buildDiscoveryQuery(String query, int start, int limit) { + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + discoverQuery.setStart(start); + discoverQuery.setMaxResults(limit); + + return discoverQuery; + } + + /** + * update the item resource policies, + * when mode equals to 'replace' will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void updateItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + AccessConditionItem acItem = accessControl.getItem(); + + if (REPLACE_MODE.equals(acItem.getMode())) { + removeReadPolicies(item, TYPE_CUSTOM); + removeReadPolicies(item, TYPE_INHERITED); + } + + setItemPolicies(item, accessControl); + logInfo(acItem.getAccessConditions(), acItem.getMode(), item); + } + + /** + * create the new resource policies of item. + * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust item's default policies. 
+ * + * @param item the item + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void setItemPolicies(Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + accessControl + .getItem() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(item, accessCondition, + itemAccessConditions.get(accessCondition.getName()))); + + itemService.adjustItemPolicies(context, item, item.getOwningCollection(), false); + } + + /** + * update the resource policies of all item's bitstreams + * or bitstreams specified into constraint node, + * and derivative bitstreams. + * + * NOTE: only bitstreams of ORIGINAL bundles + * + * @param item the item contains bitstreams + * @param accessControl the access control input + */ + private void updateBitstreamsPolicies(Item item, BulkAccessControlInput accessControl) { + AccessConditionBitstream.Constraint constraints = accessControl.getBitstream().getConstraints(); + + // look over all the bundles and force initialization of bitstreams collection + // to avoid lazy initialization exception + long count = item.getBundles() + .stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .count(); + + item.getBundles(CONTENT_BUNDLE_NAME).stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .filter(bitstream -> constraints == null || + constraints.getUuid() == null || + constraints.getUuid().size() == 0 || + constraints.getUuid().contains(bitstream.getID().toString())) + .forEach(bitstream -> updateBitstreamPolicies(bitstream, item, accessControl)); + } + + /** + * check that the bitstream node is existed, + * and contains constraint node, + * and constraint contains uuids. 
+ * + * @param bitstream the bitstream node + * @return true when uuids of constraint of bitstream is not empty, + * otherwise false + */ + private boolean containsConstraints(AccessConditionBitstream bitstream) { + return Objects.nonNull(bitstream) && + Objects.nonNull(bitstream.getConstraints()) && + isNotEmpty(bitstream.getConstraints().getUuid()); + } + + /** + * update the bitstream resource policies, + * when mode equals to replace will remove + * all current resource polices of types 'TYPE_CUSTOM' + * and 'TYPE_INHERITED' then, set the new resource policies. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws RuntimeException if something goes wrong in the database + * or an authorization error occurs + */ + private void updateBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) { + + AccessConditionBitstream acBitstream = accessControl.getBitstream(); + + if (REPLACE_MODE.equals(acBitstream.getMode())) { + removeReadPolicies(bitstream, TYPE_CUSTOM); + removeReadPolicies(bitstream, TYPE_INHERITED); + } + + try { + setBitstreamPolicies(bitstream, item, accessControl); + logInfo(acBitstream.getAccessConditions(), acBitstream.getMode(), bitstream); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + } + + /** + * remove dspace object's read policies. + * + * @param dso the dspace object + * @param type resource policy type + * @throws BulkAccessControlException if something goes wrong + * in the database or an authorization error occurs + */ + private void removeReadPolicies(DSpaceObject dso, String type) { + try { + resourcePolicyService.removePolicies(context, dso, type, Constants.READ); + } catch (SQLException | AuthorizeException e) { + throw new BulkAccessControlException(e); + } + } + + /** + * create the new resource policies of bitstream. 
+ * then, call {@link ItemService#adjustItemPolicies( + * Context, Item, Collection)} to adjust bitstream's default policies. + * and also update the resource policies of its derivative bitstreams. + * + * @param bitstream the bitstream + * @param item the item of bitstream + * @param accessControl the access control input + * @throws SQLException if something goes wrong in the database + * @throws AuthorizeException if an authorization error occurs + */ + private void setBitstreamPolicies(Bitstream bitstream, Item item, BulkAccessControlInput accessControl) + throws SQLException, AuthorizeException { + + accessControl.getBitstream() + .getAccessConditions() + .forEach(accessCondition -> createResourcePolicy(bitstream, accessCondition, + uploadAccessConditions.get(accessCondition.getName()))); + + itemService.adjustBitstreamPolicies(context, item, item.getOwningCollection(), bitstream); + mediaFilterService.updatePoliciesOfDerivativeBitstreams(context, item, bitstream); + } + + /** + * create the resource policy from the information + * comes from the access condition. 
+ * + * @param obj the dspace object + * @param accessCondition the access condition + * @param accessConditionOption the access condition option + * @throws BulkAccessControlException if an exception occurs + */ + private void createResourcePolicy(DSpaceObject obj, AccessCondition accessCondition, + AccessConditionOption accessConditionOption) { + + String name = accessCondition.getName(); + String description = accessCondition.getDescription(); + Date startDate = accessCondition.getStartDate(); + Date endDate = accessCondition.getEndDate(); + + try { + accessConditionOption.createResourcePolicy(context, obj, name, description, startDate, endDate); + } catch (Exception e) { + throw new BulkAccessControlException(e); + } + } + + /** + * Set the eperson in the context + * + * @param context the context + * @throws SQLException if database error + */ + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } + + private void logInfo(List accessConditions, String mode, DSpaceObject dso) { + String type = dso.getClass().getSimpleName(); + + if (REPLACE_MODE.equals(mode) && isEmpty(accessConditions)) { + handler.logInfo("Cleaning " + type + " {" + dso.getID() + "} policies"); + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + return; + } + + StringBuilder message = new StringBuilder(); + message.append(mode.equals(ADD_MODE) ? "Adding " : "Replacing ") + .append(type) + .append(" {") + .append(dso.getID()) + .append("} policy") + .append(mode.equals(ADD_MODE) ? 
" with " : " to ") + .append("access conditions:"); + + AppendAccessConditionsInfo(message, accessConditions); + + handler.logInfo(message.toString()); + + if (REPLACE_MODE.equals(mode) && isAppendModeEnabled()) { + handler.logInfo("Inheriting policies from owning Collection in " + type + " {" + dso.getID() + "}"); + } + } + + private void AppendAccessConditionsInfo(StringBuilder message, List accessConditions) { + DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); + message.append("{"); + + for (int i = 0; i < accessConditions.size(); i++) { + message.append(accessConditions.get(i).getName()); + + Optional.ofNullable(accessConditions.get(i).getStartDate()) + .ifPresent(date -> message.append(", start_date=" + dateFormat.format(date))); + + Optional.ofNullable(accessConditions.get(i).getEndDate()) + .ifPresent(date -> message.append(", end_date=" + dateFormat.format(date))); + + if (i != accessConditions.size() - 1) { + message.append(", "); + } + } + + message.append("}"); + } + + private boolean isAppendModeEnabled() { + return configurationService.getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode"); + } + + protected boolean isAuthorized(Context context) { + return true; + } + + @Override + @SuppressWarnings("unchecked") + public BulkAccessControlScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("bulk-access-control", BulkAccessControlScriptConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java new file mode 100644 index 000000000000..4e8cfe480eeb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCli.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source 
+ * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.scripts.DSpaceCommandLineParameter; + +/** + * Extension of {@link BulkAccessControl} for CLI. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCli extends BulkAccessControl { + + @Override + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson; + eperson = commandLine.getOptionValue('e'); + + if (eperson == null) { + handler.logError("An eperson to do the the Bulk Access Control must be specified " + + "(run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson to do the Bulk Access Control must be specified"); + } + + if (StringUtils.contains(eperson, '@')) { + myEPerson = epersonService.findByEmail(context, eperson); + } else { + myEPerson = epersonService.find(context, UUID.fromString(eperson)); + } + + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)"); + throw new UnsupportedOperationException("EPerson cannot be found: " + eperson); + } + + context.setCurrentUser(myEPerson); + } + + @Override + protected boolean isAuthorized(Context context) { + + if (context.getCurrentUser() == null) { + return false; + } + + return getScriptConfiguration().isAllowedToExecute(context, + Arrays.stream(commandLine.getOptions()) + .map(option -> + new DSpaceCommandLineParameter("-" + option.getOpt(), option.getValue())) + .collect(Collectors.toList())); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java 
b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java new file mode 100644 index 000000000000..951c93db3030 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlCliScriptConfiguration.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; + +import org.apache.commons.cli.Options; + +/** + * Extension of {@link BulkAccessControlScriptConfiguration} for CLI. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlCliScriptConfiguration + extends BulkAccessControlScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("e", "eperson", true, "email of EPerson used to perform actions"); + options.getOption("e").setRequired(true); + + options.addOption("h", "help", false, "help"); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java new file mode 100644 index 000000000000..5196247f94cb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlScriptConfiguration.java @@ -0,0 +1,110 @@ +/** + * The contents of this file are subject to the license 
and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import java.io.InputStream; +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.cli.Options; +import org.dspace.app.util.DSpaceObjectUtilsImpl; +import org.dspace.app.util.service.DSpaceObjectUtils; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.utils.DSpace; + +/** + * Script configuration for {@link BulkAccessControl}. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + * @param the {@link BulkAccessControl} type + */ +public class BulkAccessControlScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + + try { + if (Objects.isNull(commandLineParameters)) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else { + List dspaceObjectIDs = + commandLineParameters.stream() + .filter(parameter -> "-u".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .collect(Collectors.toList()); + + DSpaceObjectUtils dSpaceObjectUtils = new DSpace().getServiceManager().getServiceByName( + DSpaceObjectUtilsImpl.class.getName(), DSpaceObjectUtilsImpl.class); + + for (String dspaceObjectID : dspaceObjectIDs) { + + DSpaceObject dso = dSpaceObjectUtils.findDSpaceObject(context, UUID.fromString(dspaceObjectID)); + + if (Objects.isNull(dso)) { + throw new IllegalArgumentException(); + } + + if (!authorizeService.isAdmin(context, dso)) { + return 
false; + } + } + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + + return true; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("u", "uuid", true, "target uuids of communities/collections/items"); + options.getOption("u").setType(String.class); + options.getOption("u").setRequired(true); + + options.addOption("f", "file", true, "source json file"); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(true); + + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this + * BulkImportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java new file mode 100644 index 000000000000..092611eb0654 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/exception/BulkAccessControlException.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.exception; + +/** + * Exception for errors that occurs during the bulk access control + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + * + */ +public class BulkAccessControlException extends RuntimeException { + + private static final long 
serialVersionUID = -74730626862418515L; + + /** + * Constructor with error message and cause. + * + * @param message the error message + * @param cause the error cause + */ + public BulkAccessControlException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructor with error message. + * + * @param message the error message + */ + public BulkAccessControlException(String message) { + super(message); + } + + /** + * Constructor with error cause. + * + * @param cause the error cause + */ + public BulkAccessControlException(Throwable cause) { + super(cause); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java new file mode 100644 index 000000000000..6cf95e0e2179 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessCondition.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.Date; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; +import org.dspace.util.MultiFormatDateDeserializer; + +/** + * Class that model the values of an Access Condition as expressed in the {@link BulkAccessControl} input file + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessCondition { + + private String name; + + private String description; + + @JsonDeserialize(using = MultiFormatDateDeserializer.class) + private Date startDate; + + @JsonDeserialize(using = MultiFormatDateDeserializer.class) + private Date endDate; + + public AccessCondition() { + } + + public AccessCondition(String name, String description, 
Date startDate, Date endDate) { + this.name = name; + this.description = description; + this.startDate = startDate; + this.endDate = endDate; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public Date getStartDate() { + return startDate; + } + + public Date getEndDate() { + return endDate; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java new file mode 100644 index 000000000000..2176e24d7f9d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionBitstream.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the value of bitstream node + * from json file of the {@link BulkAccessControl} + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionBitstream { + + private String mode; + + private Constraint constraints; + + private List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public Constraint getConstraints() { + return constraints; + } + + public void setConstraints(Constraint constraints) { + this.constraints = constraints; + } + + public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } + + public 
class Constraint { + + private List uuid; + + public List getUuid() { + return uuid; + } + + public void setUuid(List uuid) { + this.uuid = uuid; + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java new file mode 100644 index 000000000000..c482dfc34d65 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/AccessConditionItem.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + +/** + * Class that model the value of item node + * from json file of the {@link BulkAccessControl} + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class AccessConditionItem { + + String mode; + + List accessConditions; + + public String getMode() { + return mode; + } + + public void setMode(String mode) { + this.mode = mode; + } + + public List getAccessConditions() { + if (accessConditions == null) { + return new ArrayList<>(); + } + return accessConditions; + } + + public void setAccessConditions(List accessConditions) { + this.accessConditions = accessConditions; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java new file mode 100644 index 000000000000..a2ebbe5a12d4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessConditionConfiguration.java @@ -0,0 +1,50 @@ +/** + * The contents 
of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import java.util.List; + +import org.dspace.submit.model.AccessConditionOption; + +/** + * A collection of conditions to be met when bulk access condition. + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionConfiguration { + + private String name; + private List itemAccessConditionOptions; + private List bitstreamAccessConditionOptions; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public List getItemAccessConditionOptions() { + return itemAccessConditionOptions; + } + + public void setItemAccessConditionOptions( + List itemAccessConditionOptions) { + this.itemAccessConditionOptions = itemAccessConditionOptions; + } + + public List getBitstreamAccessConditionOptions() { + return bitstreamAccessConditionOptions; + } + + public void setBitstreamAccessConditionOptions( + List bitstreamAccessConditionOptions) { + this.bitstreamAccessConditionOptions = bitstreamAccessConditionOptions; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java new file mode 100644 index 000000000000..0f8852a71f7d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/model/BulkAccessControlInput.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.model; + +import org.dspace.app.bulkaccesscontrol.BulkAccessControl; + 
+/** + * Class that model the content of the JSON file used as input for the {@link BulkAccessControl} + * + *
+ * {
+ * item: {
+ * mode: "replace",
+ * accessConditions: [
+ * {
+ * "name": "openaccess"
+ * }
+ * ]
+ * },
+ * bitstream: {
+ * constraints: {
+ * uuid: [bit-uuid1, bit-uuid2, ..., bit-uuidN],
+ * },
+ * mode: "add",
+ * accessConditions: [
+ * {
+ * "name": "embargo",
+ * "startDate": "2024-06-24T23:59:59.999+0000"
+ * }
+ * ]
+ * }
+ * } + *
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlInput { + + AccessConditionItem item; + + AccessConditionBitstream bitstream; + + public BulkAccessControlInput() { + } + + public BulkAccessControlInput(AccessConditionItem item, + AccessConditionBitstream bitstream) { + this.item = item; + this.bitstream = bitstream; + } + + public AccessConditionItem getItem() { + return item; + } + + public void setItem(AccessConditionItem item) { + this.item = item; + } + + public AccessConditionBitstream getBitstream() { + return bitstream; + } + + public void setBitstream(AccessConditionBitstream bitstream) { + this.bitstream = bitstream; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java new file mode 100644 index 000000000000..321b6d928e92 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkaccesscontrol/service/BulkAccessConditionConfigurationService.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol.service; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.collections4.CollectionUtils; +import org.dspace.app.bulkaccesscontrol.model.BulkAccessConditionConfiguration; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Simple bean to manage different Bulk Access Condition configurations + * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessConditionConfigurationService { + + @Autowired + private List bulkAccessConditionConfigurations; + + public List getBulkAccessConditionConfigurations() { 
+ if (CollectionUtils.isEmpty(bulkAccessConditionConfigurations)) { + return new ArrayList<>(); + } + return bulkAccessConditionConfigurations; + } + + public BulkAccessConditionConfiguration getBulkAccessConditionConfiguration(String name) { + return getBulkAccessConditionConfigurations().stream() + .filter(x -> name.equals(x.getName())) + .findFirst() + .orElse(null); + } + + public void setBulkAccessConditionConfigurations( + List bulkAccessConditionConfigurations) { + this.bulkAccessConditionConfigurations = bulkAccessConditionConfigurations; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java index 053fed18d38f..cbc052b5573f 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/DSpaceCSV.java @@ -138,7 +138,7 @@ public DSpaceCSV(boolean exportAll) { /** * Create a new instance, reading the lines in from file * - * @param inputStream the inputstream to read from + * @param inputStream the input stream to read from * @param c The DSpace Context * @throws Exception thrown if there is an error reading or processing the file */ @@ -159,7 +159,7 @@ public DSpaceCSV(InputStream inputStream, Context c) throws Exception { columnCounter++; // Remove surrounding quotes if there are any - if ((element.startsWith("\"")) && (element.endsWith("\""))) { + if (element.startsWith("\"") && element.endsWith("\"")) { element = element.substring(1, element.length() - 1); } @@ -337,15 +337,15 @@ public boolean hasActions() { /** * Set the value separator for multiple values stored in one csv value. * - * Is set in bulkedit.cfg as valueseparator + * Is set in {@code bulkedit.cfg} as {@code valueseparator}. * - * If not set, defaults to double pipe '||' + * If not set, defaults to double pipe '||'. 
*/ private void setValueSeparator() { // Get the value separator valueSeparator = DSpaceServicesFactory.getInstance().getConfigurationService() .getProperty("bulkedit.valueseparator"); - if ((valueSeparator != null) && (!"".equals(valueSeparator.trim()))) { + if ((valueSeparator != null) && !valueSeparator.trim().isEmpty()) { valueSeparator = valueSeparator.trim(); } else { valueSeparator = "||"; @@ -360,7 +360,7 @@ private void setValueSeparator() { /** * Set the field separator use to separate fields in the csv. * - * Is set in bulkedit.cfg as fieldseparator + * Is set in {@code bulkedit.cfg} as {@code fieldseparator}. * * If not set, defaults to comma ','. * @@ -371,7 +371,7 @@ private void setFieldSeparator() { // Get the value separator fieldSeparator = DSpaceServicesFactory.getInstance().getConfigurationService() .getProperty("bulkedit.fieldseparator"); - if ((fieldSeparator != null) && (!"".equals(fieldSeparator.trim()))) { + if ((fieldSeparator != null) && !fieldSeparator.trim().isEmpty()) { fieldSeparator = fieldSeparator.trim(); if ("tab".equals(fieldSeparator)) { fieldSeparator = "\t"; @@ -395,15 +395,15 @@ private void setFieldSeparator() { /** * Set the authority separator for value with authority data. * - * Is set in dspace.cfg as bulkedit.authorityseparator + * Is set in {@code dspace.cfg} as {@code bulkedit.authorityseparator}. * - * If not set, defaults to double colon '::' + * If not set, defaults to double colon '::'. 
*/ private void setAuthoritySeparator() { // Get the value separator authoritySeparator = DSpaceServicesFactory.getInstance().getConfigurationService() .getProperty("bulkedit.authorityseparator"); - if ((authoritySeparator != null) && (!"".equals(authoritySeparator.trim()))) { + if ((authoritySeparator != null) && !authoritySeparator.trim().isEmpty()) { authoritySeparator = authoritySeparator.trim(); } else { authoritySeparator = "::"; @@ -508,7 +508,7 @@ public final void addItem(String line) throws Exception { int i = 0; for (String part : bits) { int bitcounter = part.length() - part.replaceAll("\"", "").length(); - if ((part.startsWith("\"")) && ((!part.endsWith("\"")) || ((bitcounter & 1) == 1))) { + if (part.startsWith("\"") && (!part.endsWith("\"") || ((bitcounter & 1) == 1))) { found = true; String add = bits.get(i) + fieldSeparator + bits.get(i + 1); bits.remove(i); @@ -524,7 +524,7 @@ public final void addItem(String line) throws Exception { // Deal with quotes around the elements int i = 0; for (String part : bits) { - if ((part.startsWith("\"")) && (part.endsWith("\""))) { + if (part.startsWith("\"") && part.endsWith("\"")) { part = part.substring(1, part.length() - 1); bits.set(i, part); } @@ -564,7 +564,7 @@ public final void addItem(String line) throws Exception { for (String part : bits) { if (i > 0) { // Is this a last empty item? 
- if ((last) && (i == headings.size())) { + if (last && (i == headings.size())) { part = ""; } @@ -577,7 +577,7 @@ public final void addItem(String line) throws Exception { csvLine.add(headings.get(i - 1), null); String[] elements = part.split(escapedValueSeparator); for (String element : elements) { - if ((element != null) && (!"".equals(element))) { + if ((element != null) && !element.isEmpty()) { csvLine.add(headings.get(i - 1), element); } } @@ -629,18 +629,18 @@ public final String[] getCSVLinesAsStringArray() { public InputStream getInputStream() { StringBuilder stringBuilder = new StringBuilder(); for (String csvLine : getCSVLinesAsStringArray()) { - stringBuilder.append(csvLine + "\n"); + stringBuilder.append(csvLine).append("\n"); } return IOUtils.toInputStream(stringBuilder.toString(), StandardCharsets.UTF_8); } /** - * Is it Ok to export this value? When exportAll is set to false, we don't export + * Is it okay to export this value? When exportAll is set to false, we don't export * some of the metadata elements. * - * The list can be configured via the key ignore-on-export in bulkedit.cfg + * The list can be configured via the key ignore-on-export in {@code bulkedit.cfg}. * - * @param md The Metadatum to examine + * @param md The MetadataField to examine * @return Whether or not it is OK to export this element */ protected boolean okToExport(MetadataField md) { @@ -649,12 +649,8 @@ protected boolean okToExport(MetadataField md) { if (md.getQualifier() != null) { key += "." 
+ md.getQualifier(); } - if (ignore.get(key) != null) { - return false; - } - // Must be OK, so don't ignore - return true; + return ignore.get(key) == null; } /** diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index b8d41318db48..fb228e7041b8 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. 
*/ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { @@ -41,10 +24,8 @@ public Options getOptions() { Options options = new Options(); options.addOption("m", "metadata", true, "metadata field name"); - options.getOption("m").setType(String.class); options.addOption("l", "list", false, "lists the metadata fields that can be deleted"); - options.getOption("l").setType(boolean.class); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 0c513c466722..aa76c09c0a5b 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,27 +31,15 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = 
dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { Options options = new Options(); options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)"); - options.getOption("i").setType(String.class); options.addOption("a", "all", false, "include all metadata fields that are not normally changed (e.g. provenance)"); - options.getOption("a").setType(boolean.class); options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); super.options = options; diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearch.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearch.java new file mode 100644 index 000000000000..027ad116a7e2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearch.java @@ -0,0 +1,170 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.dspace.content.Item; +import org.dspace.content.MetadataDSpaceCsvExportServiceImpl; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.MetadataDSpaceCsvExportService; +import org.dspace.core.Context; +import 
org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.utils.DiscoverQueryBuilder; +import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.sort.SortOption; +import org.dspace.utils.DSpace; + +/** + * Metadata exporter to allow the batch export of metadata from a discovery search into a file + * + */ +public class MetadataExportSearch extends DSpaceRunnable { + private static final String EXPORT_CSV = "exportCSV"; + private boolean help = false; + private String identifier; + private String discoveryConfigName; + private String[] filterQueryStrings; + private boolean hasScope = false; + private String query; + + private SearchService searchService; + private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService; + private EPersonService ePersonService; + private DiscoveryConfigurationService discoveryConfigurationService; + private CommunityService communityService; + private CollectionService collectionService; + private DiscoverQueryBuilder queryBuilder; + + @Override + public MetadataExportSearchScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + searchService = SearchUtils.getSearchService(); + metadataDSpaceCsvExportService = new DSpace().getServiceManager() + .getServiceByName( 
+ MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(), + MetadataDSpaceCsvExportService.class + ); + ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + discoveryConfigurationService = SearchUtils.getConfigurationService(); + communityService = ContentServiceFactory.getInstance().getCommunityService(); + collectionService = ContentServiceFactory.getInstance().getCollectionService(); + queryBuilder = SearchUtils.getQueryBuilder(); + + if (commandLine.hasOption('h')) { + help = true; + return; + } + + if (commandLine.hasOption('q')) { + query = commandLine.getOptionValue('q'); + } + + if (commandLine.hasOption('s')) { + hasScope = true; + identifier = commandLine.getOptionValue('s'); + } + + if (commandLine.hasOption('c')) { + discoveryConfigName = commandLine.getOptionValue('c'); + } + + if (commandLine.hasOption('f')) { + filterQueryStrings = commandLine.getOptionValues('f'); + } + } + + @Override + public void internalRun() throws Exception { + if (help) { + loghelpinfo(); + printHelp(); + return; + } + handler.logDebug("starting search export"); + + IndexableObject dso = null; + Context context = new Context(); + context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier())); + + if (hasScope) { + dso = resolveScope(context, identifier); + } + + DiscoveryConfiguration discoveryConfiguration = + discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName); + + List queryBuilderSearchFilters = new ArrayList<>(); + + handler.logDebug("processing filter queries"); + if (filterQueryStrings != null) { + for (String filterQueryString: filterQueryStrings) { + String field = filterQueryString.split(",", 2)[0]; + String operator = filterQueryString.split("(,|=)", 3)[1]; + String value = filterQueryString.split("=", 2)[1]; + QueryBuilderSearchFilter queryBuilderSearchFilter = + new QueryBuilderSearchFilter(field, operator, value); + queryBuilderSearchFilters.add(queryBuilderSearchFilter); + } + } + 
handler.logDebug("building query"); + DiscoverQuery discoverQuery = + queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters, + "Item", 10, Long.getLong("0"), null, SortOption.DESCENDING); + handler.logDebug("creating iterator"); + + Iterator itemIterator = searchService.iteratorSearch(context, dso, discoverQuery); + handler.logDebug("creating dspacecsv"); + DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true); + handler.logDebug("writing to file " + getFileNameOrExportFile()); + handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV); + context.restoreAuthSystemState(); + context.complete(); + + } + + protected void loghelpinfo() { + handler.logInfo("metadata-export"); + } + + protected String getFileNameOrExportFile() { + return "metadataExportSearch.csv"; + } + + public IndexableObject resolveScope(Context context, String id) throws SQLException { + UUID uuid = UUID.fromString(id); + IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid)); + if (scopeObj.getIndexedObject() == null) { + scopeObj = new IndexableCollection(collectionService.find(context, uuid)); + } + return scopeObj; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCli.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCli.java new file mode 100644 index 000000000000..51ca77cbfb3a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCli.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +/** + * The cli version of the {@link MetadataExportSearch} script + */ +public class MetadataExportSearchCli extends 
MetadataExportSearch { + + @Override + protected String getFileNameOrExportFile() { + return commandLine.getOptionValue('n'); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCliScriptConfiguration.java new file mode 100644 index 000000000000..c0343f545a98 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchCliScriptConfiguration.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import org.apache.commons.cli.Options; + +/** + * This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the + * configuration for the {@link MetadataExportSearchCli} script + */ +public class MetadataExportSearchCliScriptConfiguration + extends MetadataExportSearchScriptConfiguration { + + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("n", "filename", true, "the filename to export to"); + return super.getOptions(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java new file mode 100644 index 000000000000..4f2a225d3ac6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import 
org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script + */ +public class MetadataExportSearchScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableclass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableclass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableclass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + options.addOption("q", "query", true, + "The discovery search string to will be used to match records. Not URL encoded"); + options.getOption("q").setType(String.class); + options.addOption("s", "scope", true, + "UUID of a specific DSpace container (site, community or collection) to which the search has to be " + + "limited"); + options.getOption("s").setType(String.class); + options.addOption("c", "configuration", true, + "The name of a Discovery configuration that should be used by this search"); + options.getOption("c").setType(String.class); + options.addOption("f", "filter", true, + "Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," + + "<:filter-operator>=<:filter-value>`. Not URL encoded. 
For example `author," + + "authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`"); + options.getOption("f").setType(String.class); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java index 0db0cc45be19..af6976acb14a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java @@ -25,6 +25,7 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.dspace.app.util.RelationshipUtils; import org.dspace.authority.AuthorityValue; import org.dspace.authority.factory.AuthorityServiceFactory; import org.dspace.authority.service.AuthorityValueService; @@ -53,7 +54,7 @@ import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; @@ -577,6 +578,10 @@ public List runImport(Context c, boolean change, wfItem = workflowService.startWithoutNotify(c, wsItem); } } else { + // Add provenance info + String provenance = installItemService.getSubmittedByProvenanceMessage(c, wsItem.getItem()); + itemService.addMetadata(c, item, MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provenance); // Install the item installItemService.installItem(c, wsItem); } @@ -597,18 +602,19 @@ public List runImport(Context c, boolean change, changes.add(whatHasChanged); } - if (change) { - //only clear cache if changes have been made. 
- c.uncacheEntity(wsItem); - c.uncacheEntity(wfItem); - c.uncacheEntity(item); + if (change && (rowCount % configurationService.getIntProperty("bulkedit.change.commit.count", 100) == 0)) { + c.commit(); + handler.logInfo(LogHelper.getHeader(c, "metadata_import_commit", "lineNumber=" + rowCount)); } populateRefAndRowMap(line, item == null ? null : item.getID()); // keep track of current rows processed rowCount++; } + if (change) { + c.commit(); + } - c.setMode(originalMode); + c.setMode(Context.Mode.READ_ONLY); // Return the changes @@ -640,7 +646,7 @@ protected void compareAndUpdate(Context c, Item item, String[] fromCSV, boolean all += part + ","; } all = all.substring(0, all.length()); - log.debug(LogManager.getHeader(c, "metadata_import", + log.debug(LogHelper.getHeader(c, "metadata_import", "item_id=" + item.getID() + ",fromCSV=" + all)); // Don't compare collections or actions or rowNames @@ -677,7 +683,7 @@ protected void compareAndUpdate(Context c, Item item, String[] fromCSV, boolean qualifier = qualifier.substring(0, qualifier.indexOf('[')); } } - log.debug(LogManager.getHeader(c, "metadata_import", + log.debug(LogHelper.getHeader(c, "metadata_import", "item_id=" + item.getID() + ",fromCSV=" + all + ",looking_for_schema=" + schema + ",looking_for_element=" + element + @@ -697,7 +703,7 @@ protected void compareAndUpdate(Context c, Item item, String[] fromCSV, boolean .getConfidence() : Choices.CF_ACCEPTED); } i++; - log.debug(LogManager.getHeader(c, "metadata_import", + log.debug(LogHelper.getHeader(c, "metadata_import", "item_id=" + item.getID() + ",fromCSV=" + all + ",found=" + dcv.getValue())); } @@ -748,7 +754,7 @@ protected void compareAndUpdate(Context c, Item item, String[] fromCSV, boolean // column "dc.contributor.author" so don't remove it if ((value != null) && (!"".equals(value)) && (!contains(value, fromCSV)) && fromAuthority == null) { // Remove it - log.debug(LogManager.getHeader(c, "metadata_import", + log.debug(LogHelper.getHeader(c, 
"metadata_import", "item_id=" + item.getID() + ",fromCSV=" + all + ",removing_schema=" + schema + ",removing_element=" + element + @@ -924,11 +930,10 @@ private void addRelationship(Context c, Item item, String typeName, String value rightItem = item; } - // Create the relationship - int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem); - int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem); - Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem, - foundRelationshipType, leftPlace, rightPlace); + // Create the relationship, appending to the end + Relationship persistedRelationship = relationshipService.create( + c, leftItem, rightItem, foundRelationshipType, -1, -1 + ); relationshipService.update(c, persistedRelationship); } @@ -1362,7 +1367,7 @@ private int displayChanges(List changes, boolean changed) { * is the field is defined as authority controlled */ private static boolean isAuthorityControlledField(String md) { - String mdf = StringUtils.substringAfter(md, ":"); + String mdf = md.contains(":") ? 
StringUtils.substringAfter(md, ":") : md; mdf = StringUtils.substringBefore(mdf, "["); return authorityControlled.contains(mdf); } @@ -1793,36 +1798,7 @@ private void validateTypesByTypeByTypeName(Context c, */ private RelationshipType matchRelationshipType(List relTypes, String targetType, String originType, String originTypeName) { - RelationshipType foundRelationshipType = null; - if (originTypeName.split("\\.").length > 1) { - originTypeName = originTypeName.split("\\.")[1]; - } - for (RelationshipType relationshipType : relTypes) { - // Is origin type leftward or righward - boolean isLeft = false; - if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)) { - isLeft = true; - } - if (isLeft) { - // Validate typeName reference - if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) { - continue; - } - if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType) && - relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) { - foundRelationshipType = relationshipType; - } - } else { - if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) { - continue; - } - if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType) && - relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) { - foundRelationshipType = relationshipType; - } - } - } - return foundRelationshipType; + return RelationshipUtils.matchRelationshipType(relTypes, targetType, originType, originTypeName); } } diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java index 038df616cae5..7e1537fe9d91 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportCliScriptConfiguration.java @@ -19,7 +19,6 @@ public class 
MetadataImportCliScriptConfiguration extends MetadataImportScriptCo public Options getOptions() { Options options = super.getOptions(); options.addOption("e", "email", true, "email address or user id of user (required if adding new items)"); - options.getOption("e").setType(String.class); options.getOption("e").setRequired(true); super.options = options; return options; diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java index 07e6a9aec96e..ce2f7fb68af1 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java @@ -8,22 +8,15 @@ package org.dspace.app.bulkedit; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataImport} script */ public class MetadataImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { @@ -59,20 +43,14 @@ public Options getOptions() { options.getOption("f").setRequired(true); options.addOption("s", "silent", 
false, "silent operation - doesn't request confirmation of changes USE WITH CAUTION"); - options.getOption("s").setType(boolean.class); options.addOption("w", "workflow", false, "workflow - when adding new items, use collection workflow"); - options.getOption("w").setType(boolean.class); options.addOption("n", "notify", false, "notify - when adding new items using a workflow, send notification emails"); - options.getOption("n").setType(boolean.class); options.addOption("v", "validate-only", false, "validate - just validate the csv, don't run the import"); - options.getOption("v").setType(boolean.class); options.addOption("t", "template", false, "template - when adding new items, use the collection template (if it exists)"); - options.getOption("t").setType(boolean.class); options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java b/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java new file mode 100644 index 000000000000..8291af87fc2e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.exception; + +/** + * This class provides an exception to be used when trying to save a resource + * that already exists. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResourceAlreadyExistsException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + /** + * Create a ResourceAlreadyExistsException with a message and the already + * existing resource. 
+ * + * @param message the error message + */ + public ResourceAlreadyExistsException(String message) { + super(message); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/Harvest.java b/dspace-api/src/main/java/org/dspace/app/harvest/Harvest.java index 1eaa78338004..f2630572e362 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/Harvest.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/Harvest.java @@ -13,11 +13,8 @@ import java.util.List; import java.util.UUID; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; @@ -36,224 +33,223 @@ import org.dspace.harvest.OAIHarvester; import org.dspace.harvest.factory.HarvestServiceFactory; import org.dspace.harvest.service.HarvestedCollectionService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; /** * Test class for harvested collections. 
* * @author Alexey Maslov */ -public class Harvest { - private static Context context; - - private static final HarvestedCollectionService harvestedCollectionService = - HarvestServiceFactory.getInstance().getHarvestedCollectionService(); - private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - private static final CollectionService collectionService = - ContentServiceFactory.getInstance().getCollectionService(); - - public static void main(String[] argv) throws Exception { - // create an options object and populate it - CommandLineParser parser = new DefaultParser(); - - Options options = new Options(); - - options.addOption("p", "purge", false, "delete all items in the collection"); - options.addOption("r", "run", false, "run the standard harvest procedure"); - options.addOption("g", "ping", false, "test the OAI server and set"); - options.addOption("o", "once", false, "run the harvest procedure with specified parameters"); - options.addOption("s", "setup", false, "Set the collection up for harvesting"); - options.addOption("S", "start", false, "start the harvest loop"); - options.addOption("R", "reset", false, "reset harvest status on all collections"); - options.addOption("P", "purge", false, "purge all harvestable collections"); - - - options.addOption("e", "eperson", true, - "eperson"); - options.addOption("c", "collection", true, - "harvesting collection (handle or id)"); - options.addOption("t", "type", true, - "type of harvesting (0 for none)"); - options.addOption("a", "address", true, - "address of the OAI-PMH server"); - options.addOption("i", "oai_set_id", true, - "id of the PMH set representing the harvested collection"); - options.addOption("m", "metadata_format", true, - "the name of the desired metadata format for harvesting, resolved to namespace and " + - "crosswalk in dspace.cfg"); - - options.addOption("h", "help", false, "help"); - - CommandLine line = parser.parse(options, argv); - - 
String command = null; - String eperson = null; - String collection = null; - String oaiSource = null; - String oaiSetID = null; - String metadataKey = null; - int harvestType = 0; - - if (line.hasOption('h')) { - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("Harvest\n", options); - System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id"); - System.out.println( - "RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a " + - "oai_source -i oai_set_id -m metadata_format"); - System.out.println( - "SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " + - "oai_set_id -m metadata_format"); - System.out.println("RUN harvest once: Harvest -r -e eperson -c collection"); - System.out.println("START harvest scheduler: Harvest -S"); - System.out.println("RESET all harvest status: Harvest -R"); - System.out.println("PURGE a collection of items and settings: Harvest -p -e eperson -c collection"); - System.out.println("PURGE all harvestable collections: Harvest -P -e eperson"); - - - System.exit(0); - } +public class Harvest extends DSpaceRunnable { + + private HarvestedCollectionService harvestedCollectionService; + protected EPersonService ePersonService; + private CollectionService collectionService; + + private boolean help; + private String command = null; + private String collection = null; + private String oaiSource = null; + private String oaiSetID = null; + private String metadataKey = null; + private int harvestType = 0; + + protected Context context; + + + public HarvestScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("harvest", HarvestScriptConfiguration.class); + } + + public void setup() throws ParseException { + harvestedCollectionService = + HarvestServiceFactory.getInstance().getHarvestedCollectionService(); + ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); 
+ collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + + assignCurrentUserInContext(); - if (line.hasOption('s')) { + help = commandLine.hasOption('h'); + + + if (commandLine.hasOption('s')) { command = "config"; } - if (line.hasOption('p')) { + if (commandLine.hasOption('p')) { command = "purge"; } - if (line.hasOption('r')) { + if (commandLine.hasOption('r')) { command = "run"; } - if (line.hasOption('g')) { + if (commandLine.hasOption('g')) { command = "ping"; } - if (line.hasOption('o')) { - command = "runOnce"; - } - if (line.hasOption('S')) { + if (commandLine.hasOption('S')) { command = "start"; } - if (line.hasOption('R')) { + if (commandLine.hasOption('R')) { command = "reset"; } - if (line.hasOption('P')) { + if (commandLine.hasOption('P')) { command = "purgeAll"; } - - - if (line.hasOption('e')) { - eperson = line.getOptionValue('e'); + if (commandLine.hasOption('o')) { + command = "reimport"; } - if (line.hasOption('c')) { - collection = line.getOptionValue('c'); + if (commandLine.hasOption('c')) { + collection = commandLine.getOptionValue('c'); } - if (line.hasOption('t')) { - harvestType = Integer.parseInt(line.getOptionValue('t')); + if (commandLine.hasOption('t')) { + harvestType = Integer.parseInt(commandLine.getOptionValue('t')); } else { harvestType = 0; } - if (line.hasOption('a')) { - oaiSource = line.getOptionValue('a'); + if (commandLine.hasOption('a')) { + oaiSource = commandLine.getOptionValue('a'); } - if (line.hasOption('i')) { - oaiSetID = line.getOptionValue('i'); + if (commandLine.hasOption('i')) { + oaiSetID = commandLine.getOptionValue('i'); } - if (line.hasOption('m')) { - metadataKey = line.getOptionValue('m'); + if (commandLine.hasOption('m')) { + metadataKey = commandLine.getOptionValue('m'); } + } + /** + * This method will assign the currentUser to the {@link Context} variable which is also created in this method. 
+ * The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier + * was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it + * and this {@link EPerson} will be set as the currentUser of the created {@link Context} + * @throws ParseException If something went wrong with the retrieval of the EPerson Identifier + */ + protected void assignCurrentUserInContext() throws ParseException { + UUID currentUserUuid = this.getEpersonIdentifier(); + try { + this.context = new Context(Context.Mode.BATCH_EDIT); + EPerson eperson = ePersonService.find(context, currentUserUuid); + if (eperson == null) { + super.handler.logError("EPerson not found: " + currentUserUuid); + throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); + } + this.context.setCurrentUser(eperson); + } catch (SQLException e) { + handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); + } + } - // Instantiate our class - Harvest harvester = new Harvest(); - harvester.context = new Context(Context.Mode.BATCH_EDIT); - + public void internalRun() throws Exception { + if (help) { + printHelp(); + handler.logInfo("PING OAI server: Harvest -g -a oai_source -i oai_set_id"); + handler.logInfo( + "SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " + + "oai_set_id -m metadata_format"); + handler.logInfo("RUN harvest once: Harvest -r -e eperson -c collection"); + handler.logInfo("START harvest scheduler: Harvest -S"); + handler.logInfo("RESET all harvest status: Harvest -R"); + handler.logInfo("PURGE a collection of items and settings: Harvest -p -e eperson -c collection"); + handler.logInfo("PURGE all harvestable collections: Harvest -P -e eperson"); + + return; + } - // Check our options - if (command == null) { - System.out - .println("Error - no parameters specified (run with -h flag for 
details)"); - System.exit(1); + if (StringUtils.isBlank(command)) { + handler.logError("No parameters specified (run with -h flag for details)"); + throw new UnsupportedOperationException("No command specified"); } else if ("run".equals(command)) { // Run a single harvest cycle on a collection using saved settings. - if (collection == null || eperson == null) { - System.out - .println("Error - a target collection and eperson must be provided"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + if (collection == null || context.getCurrentUser() == null) { + handler.logError("A target collection and eperson must be provided (run with -h flag for details)"); + throw new UnsupportedOperationException("A target collection and eperson must be provided"); } - - harvester.runHarvest(collection, eperson); + runHarvest(context, collection); } else if ("start".equals(command)) { // start the harvest loop startHarvester(); } else if ("reset".equals(command)) { // reset harvesting status - resetHarvesting(); + resetHarvesting(context); } else if ("purgeAll".equals(command)) { // purge all collections that are set up for harvesting (obviously for testing purposes only) - if (eperson == null) { - System.out - .println("Error - an eperson must be provided"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + if (context.getCurrentUser() == null) { + handler.logError("An eperson must be provided (run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson must be provided"); } List harvestedCollections = harvestedCollectionService.findAll(context); for (HarvestedCollection harvestedCollection : harvestedCollections) { - System.out.println( - "Purging the following collections (deleting items and resetting harvest status): " + - harvestedCollection - .getCollection().getID().toString()); - harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson); + handler.logInfo( + 
"Purging the following collections (deleting items and resetting harvest status): " + + harvestedCollection + .getCollection().getID().toString()); + purgeCollection(context, harvestedCollection.getCollection().getID().toString()); } context.complete(); } else if ("purge".equals(command)) { // Delete all items in a collection. Useful for testing fresh harvests. - if (collection == null || eperson == null) { - System.out - .println("Error - a target collection and eperson must be provided"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + if (collection == null || context.getCurrentUser() == null) { + handler.logError("A target collection and eperson must be provided (run with -h flag for details)"); + throw new UnsupportedOperationException("A target collection and eperson must be provided"); } - harvester.purgeCollection(collection, eperson); + purgeCollection(context, collection); + context.complete(); + + } else if ("reimport".equals(command)) { + // Delete all items in a collection. Useful for testing fresh harvests. + if (collection == null || context.getCurrentUser() == null) { + handler.logError("A target collection and eperson must be provided (run with -h flag for details)"); + throw new UnsupportedOperationException("A target collection and eperson must be provided"); + } + purgeCollection(context, collection); + runHarvest(context, collection); context.complete(); - //TODO: implement this... 
remove all items and remember to unset "last-harvested" settings } else if ("config".equals(command)) { // Configure a collection with the three main settings if (collection == null) { - System.out.println("Error - a target collection must be provided"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + handler.logError("A target collection must be provided (run with -h flag for details)"); + throw new UnsupportedOperationException("A target collection must be provided"); } if (oaiSource == null || oaiSetID == null) { - System.out.println("Error - both the OAI server address and OAI set id must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + handler.logError( + "Both the OAI server address and OAI set id must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("Both the OAI server address and OAI set id must be specified"); } if (metadataKey == null) { - System.out - .println("Error - a metadata key (commonly the prefix) must be specified for this collection"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + handler.logError( + "A metadata key (commonly the prefix) must be specified for this collection (run with -h flag" + + " for details)"); + throw new UnsupportedOperationException( + "A metadata key (commonly the prefix) must be specified for this collection"); } - harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey); + configureCollection(context, collection, harvestType, oaiSource, oaiSetID, metadataKey); } else if ("ping".equals(command)) { if (oaiSource == null || oaiSetID == null) { - System.out.println("Error - both the OAI server address and OAI set id must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); + handler.logError( + "Both the OAI server address and OAI set id must be specified (run with -h flag for details)"); + throw new 
UnsupportedOperationException("Both the OAI server address and OAI set id must be specified"); } pingResponder(oaiSource, oaiSetID, metadataKey); + } else { + handler.logError( + "Your command '" + command + "' was not recognized properly (run with -h flag for details)"); + throw new UnsupportedOperationException("Your command '" + command + "' was not recognized properly"); } + + } /* * Resolve the ID into a collection and check to see if its harvesting options are set. If so, return * the collection, if not, bail out. */ - private Collection resolveCollection(String collectionID) { + private Collection resolveCollection(Context context, String collectionID) { DSpaceObject dso; Collection targetCollection = null; @@ -273,14 +269,14 @@ private Collection resolveCollection(String collectionID) { } } else { // not a handle, try and treat it as an collection database UUID - System.out.println("Looking up by UUID: " + collectionID + ", " + "in context: " + context); + handler.logInfo("Looking up by UUID: " + collectionID + ", " + "in context: " + context); targetCollection = collectionService.find(context, UUID.fromString(collectionID)); } } // was the collection valid? 
if (targetCollection == null) { - System.out.println("Cannot resolve " + collectionID + " to collection"); - System.exit(1); + handler.logError("Cannot resolve " + collectionID + " to collection"); + throw new UnsupportedOperationException("Cannot resolve " + collectionID + " to collection"); } } catch (SQLException se) { se.printStackTrace(); @@ -290,12 +286,12 @@ private Collection resolveCollection(String collectionID) { } - private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId, + private void configureCollection(Context context, String collectionID, int type, String oaiSource, String oaiSetId, String mdConfigId) { - System.out.println("Running: configure collection"); + handler.logInfo("Running: configure collection"); - Collection collection = resolveCollection(collectionID); - System.out.println(collection.getID()); + Collection collection = resolveCollection(context, collectionID); + handler.logInfo(String.valueOf(collection.getID())); try { HarvestedCollection hc = harvestedCollectionService.find(context, collection); @@ -310,9 +306,8 @@ private void configureCollection(String collectionID, int type, String oaiSource context.restoreAuthSystemState(); context.complete(); } catch (Exception e) { - System.out.println("Changes could not be committed"); - e.printStackTrace(); - System.exit(1); + handler.logError("Changes could not be committed"); + handler.handleException(e); } finally { if (context != null) { context.restoreAuthSystemState(); @@ -323,18 +318,15 @@ private void configureCollection(String collectionID, int type, String oaiSource /** * Purges a collection of all harvest-related data and settings. All items in the collection will be deleted. 
+ * @param collectionID * - * @param collectionID - * @param email */ - private void purgeCollection(String collectionID, String email) { - System.out.println( - "Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID); - Collection collection = resolveCollection(collectionID); + private void purgeCollection(Context context, String collectionID) { + handler.logInfo( + "Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID); + Collection collection = resolveCollection(context, collectionID); try { - EPerson eperson = ePersonService.findByEmail(context, email); - context.setCurrentUser(eperson); context.turnOffAuthorisationSystem(); ItemService itemService = ContentServiceFactory.getInstance().getItemService(); @@ -343,7 +335,7 @@ private void purgeCollection(String collectionID, String email) { while (it.hasNext()) { i++; Item item = it.next(); - System.out.println("Deleting: " + item.getHandle()); + handler.logInfo("Deleting: " + item.getHandle()); collectionService.removeItem(context, collection, item); context.uncacheEntity(item);// Dispatch events every 50 items if (i % 50 == 0) { @@ -363,9 +355,8 @@ private void purgeCollection(String collectionID, String email) { context.restoreAuthSystemState(); context.dispatchEvents(); } catch (Exception e) { - System.out.println("Changes could not be committed"); - e.printStackTrace(); - System.exit(1); + handler.logError("Changes could not be committed"); + handler.handleException(e); } finally { context.restoreAuthSystemState(); } @@ -375,46 +366,42 @@ private void purgeCollection(String collectionID, String email) { /** * Run a single harvest cycle on the specified collection under the authorization of the supplied EPerson */ - private void runHarvest(String collectionID, String email) { - System.out.println("Running: a harvest cycle on " + collectionID); + private void runHarvest(Context context, String collectionID) { + 
handler.logInfo("Running: a harvest cycle on " + collectionID); - System.out.print("Initializing the harvester... "); + handler.logInfo("Initializing the harvester... "); OAIHarvester harvester = null; try { - Collection collection = resolveCollection(collectionID); + Collection collection = resolveCollection(context, collectionID); HarvestedCollection hc = harvestedCollectionService.find(context, collection); harvester = new OAIHarvester(context, collection, hc); - System.out.println("success. "); + handler.logInfo("Initialized the harvester successfully"); } catch (HarvestingException hex) { - System.out.print("failed. "); - System.out.println(hex.getMessage()); + handler.logError("Initializing the harvester failed."); throw new IllegalStateException("Unable to harvest", hex); } catch (SQLException se) { - System.out.print("failed. "); - System.out.println(se.getMessage()); + handler.logError("Initializing the harvester failed."); throw new IllegalStateException("Unable to access database", se); } try { // Harvest will not work for an anonymous user - EPerson eperson = ePersonService.findByEmail(context, email); - System.out.println("Harvest started... "); - context.setCurrentUser(eperson); + handler.logInfo("Harvest started... "); harvester.runHarvest(); context.complete(); } catch (SQLException | AuthorizeException | IOException e) { throw new IllegalStateException("Failed to run harvester", e); } - System.out.println("Harvest complete. "); + handler.logInfo("Harvest complete. "); } /** * Resets harvest_status and harvest_start_time flags for all collections that have a row in the * harvested_collections table */ - private static void resetHarvesting() { - System.out.print("Resetting harvest status flag on all collections... "); + private void resetHarvesting(Context context) { + handler.logInfo("Resetting harvest status flag on all collections... 
"); try { List harvestedCollections = harvestedCollectionService.findAll(context); @@ -424,21 +411,21 @@ private static void resetHarvesting() { harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY); harvestedCollectionService.update(context, harvestedCollection); } - System.out.println("success. "); + handler.logInfo("Reset harvest status flag successfully"); } catch (Exception ex) { - System.out.println("failed. "); - ex.printStackTrace(); + handler.logError("Resetting harvest status flag failed"); + handler.handleException(ex); } } /** * Starts up the harvest scheduler. Terminating this process will stop the scheduler. */ - private static void startHarvester() { + private void startHarvester() { try { - System.out.print("Starting harvest loop... "); + handler.logInfo("Starting harvest loop... "); HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler(); - System.out.println("running. "); + handler.logInfo("running. "); } catch (Exception ex) { ex.printStackTrace(); } @@ -451,29 +438,31 @@ private static void startHarvester() { * @param set name of an item set. * @param metadataFormat local prefix name, or null for "dc". */ - private static void pingResponder(String server, String set, String metadataFormat) { + private void pingResponder(String server, String set, String metadataFormat) { List errors; - System.out.print("Testing basic PMH access: "); + handler.logInfo("Testing basic PMH access: "); errors = harvestedCollectionService.verifyOAIharvester(server, set, - (null != metadataFormat) ? metadataFormat : "dc", false); + (null != metadataFormat) ? 
metadataFormat : "dc", false); if (errors.isEmpty()) { - System.out.println("OK"); + handler.logInfo("OK"); } else { for (String error : errors) { - System.err.println(error); + handler.logError(error); } } - System.out.print("Testing ORE support: "); + handler.logInfo("Testing ORE support: "); errors = harvestedCollectionService.verifyOAIharvester(server, set, - (null != metadataFormat) ? metadataFormat : "dc", true); + (null != metadataFormat) ? metadataFormat : "dc", true); if (errors.isEmpty()) { - System.out.println("OK"); + handler.logInfo("OK"); } else { for (String error : errors) { - System.err.println(error); + handler.logError(error); } } } + + } diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestCli.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestCli.java new file mode 100644 index 000000000000..8c9766e93421 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestCli.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.harvest; + +import java.sql.SQLException; + +import org.apache.commons.cli.ParseException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; + +public class HarvestCli extends Harvest { + + /** + * This is the overridden instance of the {@link Harvest#assignCurrentUserInContext()} method in the parent class + * {@link Harvest}. + * This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given + * with the parameters of the Script. 
+ * + * @throws ParseException If the e flag was not given to the parameters when calling the script + */ + @Override + protected void assignCurrentUserInContext() throws ParseException { + if (this.commandLine.hasOption('e')) { + String ePersonEmail = this.commandLine.getOptionValue('e'); + this.context = new Context(Context.Mode.BATCH_EDIT); + try { + EPerson ePerson = ePersonService.findByEmail(this.context, ePersonEmail); + if (ePerson == null) { + super.handler.logError("EPerson not found: " + ePersonEmail); + throw new IllegalArgumentException("Unable to find a user with email: " + ePersonEmail); + } + this.context.setCurrentUser(ePerson); + } catch (SQLException e) { + throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail); + } + } + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestCliScriptConfiguration.java new file mode 100644 index 000000000000..9e58b64a6243 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestCliScriptConfiguration.java @@ -0,0 +1,22 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.harvest; + +import org.apache.commons.cli.Options; + + +public class HarvestCliScriptConfiguration extends HarvestScriptConfiguration { + + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("e", "eperson", true, + "eperson"); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java new file mode 100644 index 000000000000..ff83c3ecb225 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.harvest; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + + +public class HarvestScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + + public Options getOptions() { + Options options = new Options(); + options.addOption("p", "purge", false, "delete all items in the collection"); + options.addOption("r", "run", false, "run the standard harvest procedure"); + options.addOption("g", "ping", false, "test the OAI server and set"); + options.addOption("s", "setup", false, "Set the collection up for harvesting"); + options.addOption("S", "start", false, "start the harvest loop"); + options.addOption("R", "reset", false, "reset harvest status on all collections"); + options.addOption("P", "purgeCollections", false, "purge all harvestable collections"); + options.addOption("o", "reimport", false, "reimport all items in the collection, " + + "this is equivalent to -p -r, purging all items in a collection and reimporting them"); + options.addOption("c", "collection", true, + "harvesting collection (handle or id)"); + options.addOption("t", "type", true, + "type of harvesting (0 for none)"); + options.addOption("a", "address", true, + "address of the OAI-PMH server"); + options.addOption("i", "oai_set_id", true, + "id of the PMH set representing the harvested collection"); + options.addOption("m", "metadata_format", 
true, + "the name of the desired metadata format for harvesting, resolved to namespace and " + + "crosswalk in dspace.cfg"); + + options.addOption("h", "help", false, "help"); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExport.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExport.java new file mode 100644 index 000000000000..71fc088694d9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExport.java @@ -0,0 +1,264 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.file.PathUtils; +import org.dspace.app.itemexport.factory.ItemExportServiceFactory; +import org.dspace.app.itemexport.service.ItemExportService; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; + +/** + * Item exporter to create simple AIPs for DSpace content. 
Currently exports + * individual items, or entire collections. For instructions on use, see + * printUsage() method. + *

+ * ItemExport creates the simple AIP package that the importer also uses. It + * consists of: + *

+ * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin + * core in RDF schema / contents - text file, listing one file per line / file1 + * - files contained in the item / file2 / ... + *

+ * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into + * {@code &}, etc.) + *

+ * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration + * of files (bitstreams) into DSpace. + * + * @author David Little + * @author Jay Paz + */ +public class ItemExport extends DSpaceRunnable { + + public static final String TEMP_DIR = "exportSAF"; + public static final String ZIP_NAME = "exportSAFZip"; + public static final String ZIP_FILENAME = "saf-export"; + public static final String ZIP_EXT = "zip"; + + protected String typeString = null; + protected String destDirName = null; + protected String idString = null; + protected int seqStart = -1; + protected int type = -1; + protected Item item = null; + protected Collection collection = null; + protected boolean migrate = false; + protected boolean zip = false; + protected String zipFileName = ""; + protected boolean excludeBitstreams = false; + protected boolean help = false; + + protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected static final EPersonService epersonService = + EPersonServiceFactory.getInstance().getEPersonService(); + + @Override + public ItemExportScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("export", ItemExportScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + help = commandLine.hasOption('h'); + + if (commandLine.hasOption('t')) { // type + typeString = commandLine.getOptionValue('t'); + + if ("ITEM".equals(typeString)) { + type = Constants.ITEM; + } else if ("COLLECTION".equals(typeString)) { + type = Constants.COLLECTION; + } + } + + if (commandLine.hasOption('i')) { // id + idString = commandLine.getOptionValue('i'); + } + + setNumber(); + + if (commandLine.hasOption('m')) { // 
number + migrate = true; + } + + if (commandLine.hasOption('x')) { + excludeBitstreams = true; + } + } + + @Override + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + + validate(); + + Context context = new Context(); + context.turnOffAuthorisationSystem(); + + if (type == Constants.ITEM) { + // first, is myIDString a handle? + if (idString.indexOf('/') != -1) { + item = (Item) handleService.resolveToObject(context, idString); + + if ((item == null) || (item.getType() != Constants.ITEM)) { + item = null; + } + } else { + item = itemService.find(context, UUID.fromString(idString)); + } + + if (item == null) { + handler.logError("The item cannot be found: " + idString + " (run with -h flag for details)"); + throw new UnsupportedOperationException("The item cannot be found: " + idString); + } + } else { + if (idString.indexOf('/') != -1) { + // has a / must be a handle + collection = (Collection) handleService.resolveToObject(context, + idString); + + // ensure it's a collection + if ((collection == null) + || (collection.getType() != Constants.COLLECTION)) { + collection = null; + } + } else { + collection = collectionService.find(context, UUID.fromString(idString)); + } + + if (collection == null) { + handler.logError("The collection cannot be found: " + idString + " (run with -h flag for details)"); + throw new UnsupportedOperationException("The collection cannot be found: " + idString); + } + } + + ItemExportService itemExportService = ItemExportServiceFactory.getInstance() + .getItemExportService(); + try { + itemExportService.setHandler(handler); + process(context, itemExportService); + context.complete(); + } catch (Exception e) { + context.abort(); + throw new Exception(e); + } + } + + /** + * Validate the options + */ + protected void validate() { + if (type == -1) { + handler.logError("The type must be either COLLECTION or ITEM (run with -h flag for details)"); + throw new UnsupportedOperationException("The type must 
be either COLLECTION or ITEM"); + } + + if (idString == null) { + handler.logError("The ID must be set to either a database ID or a handle (run with -h flag for details)"); + throw new UnsupportedOperationException("The ID must be set to either a database ID or a handle"); + } + } + + /** + * Process the export + * @param context + * @throws Exception + */ + protected void process(Context context, ItemExportService itemExportService) throws Exception { + setEPerson(context); + setDestDirName(context, itemExportService); + setZip(context); + + Iterator items; + if (item != null) { + List myItems = new ArrayList<>(); + myItems.add(item); + items = myItems.iterator(); + } else { + handler.logInfo("Exporting from collection: " + idString); + items = itemService.findByCollection(context, collection); + } + itemExportService.exportAsZip(context, items, destDirName, zipFileName, + seqStart, migrate, excludeBitstreams); + + File zip = new File(destDirName + System.getProperty("file.separator") + zipFileName); + try (InputStream is = new FileInputStream(zip)) { + // write input stream on handler + handler.writeFilestream(context, ZIP_FILENAME + "." + ZIP_EXT, is, ZIP_NAME); + } finally { + PathUtils.deleteDirectory(Path.of(destDirName)); + } + } + + /** + * Set the destination directory option + */ + protected void setDestDirName(Context context, ItemExportService itemExportService) throws Exception { + destDirName = itemExportService.getExportWorkDirectory() + File.separator + TEMP_DIR; + } + + /** + * Set the zip option + */ + protected void setZip(Context context) { + zip = true; + zipFileName = ZIP_FILENAME + "-" + context.getCurrentUser().getID() + "." 
+ ZIP_EXT; + } + + /** + * Set the number option + */ + protected void setNumber() { + seqStart = 1; + if (commandLine.hasOption('n')) { // number + seqStart = Integer.parseInt(commandLine.getOptionValue('n')); + } + } + + private void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + // check eperson + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLI.java new file mode 100644 index 000000000000..8e9af1e01094 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLI.java @@ -0,0 +1,96 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.dspace.app.itemexport.service.ItemExportService; +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * CLI variant for the {@link ItemExport} class. + * This was done to specify the specific behaviors for the CLI. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportCLI extends ItemExport { + + @Override + protected void validate() { + super.validate(); + + setDestDirName(); + + if (destDirName == null) { + handler.logError("The destination directory must be set (run with -h flag for details)"); + throw new UnsupportedOperationException("The destination directory must be set"); + } + + if (seqStart == -1) { + handler.logError("The sequence start number must be set (run with -h flag for details)"); + throw new UnsupportedOperationException("The sequence start number must be set"); + } + } + + @Override + protected void process(Context context, ItemExportService itemExportService) throws Exception { + setZip(context); + + if (zip) { + Iterator items; + if (item != null) { + List myItems = new ArrayList<>(); + myItems.add(item); + items = myItems.iterator(); + } else { + handler.logInfo("Exporting from collection: " + idString); + items = itemService.findByCollection(context, collection); + } + itemExportService.exportAsZip(context, items, destDirName, zipFileName, + seqStart, migrate, excludeBitstreams); + } else { + if (item != null) { + // it's only a single item + itemExportService + .exportItem(context, Collections.singletonList(item).iterator(), destDirName, + seqStart, migrate, excludeBitstreams); + } else { + handler.logInfo("Exporting from collection: " + idString); + + // it's a collection, so do a bunch of items + Iterator i = itemService.findByCollection(context, collection); + itemExportService.exportItem(context, i, destDirName, seqStart, migrate, excludeBitstreams); + } + } + } + + protected void setDestDirName() { + if (commandLine.hasOption('d')) { // dest + destDirName = commandLine.getOptionValue('d'); + } + } + + @Override + protected void setZip(Context context) { + if (commandLine.hasOption('z')) { + zip = true; + zipFileName = commandLine.getOptionValue('z'); + } + } + + @Override + protected 
void setNumber() { + if (commandLine.hasOption('n')) { // number + seqStart = Integer.parseInt(commandLine.getOptionValue('n')); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLIScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLIScriptConfiguration.java new file mode 100644 index 000000000000..ff79c7cfa703 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLIScriptConfiguration.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ItemExportCLI} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportCLIScriptConfiguration extends ItemExportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("t").longOpt("type") + .desc("type: COLLECTION or ITEM") + .hasArg().required().build()); + options.addOption(Option.builder("i").longOpt("id") + .desc("ID or handle of thing to export") + .hasArg().required().build()); + options.addOption(Option.builder("d").longOpt("dest") + .desc("destination where you want items to go") + .hasArg().required().build()); + options.addOption(Option.builder("n").longOpt("number") + .desc("sequence number to begin exporting items with") + .hasArg().required().build()); + options.addOption(Option.builder("z").longOpt("zip") + .desc("export as zip file (specify filename e.g. 
export.zip)") + .hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("migrate") + .desc("export for migration (remove handle and metadata that will be re-created in new system)") + .hasArg(false).required(false).build()); + + // as pointed out by Peter Dietz this provides similar functionality to export metadata + // but it is needed since it directly exports to Simple Archive Format (SAF) + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not export bitstreams") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java deleted file mode 100644 index d6a69b582394..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportCLITool.java +++ /dev/null @@ -1,246 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.itemexport; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.UUID; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.dspace.app.itemexport.factory.ItemExportServiceFactory; -import org.dspace.app.itemexport.service.ItemExportService; -import org.dspace.content.Collection; -import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import 
org.dspace.content.service.ItemService; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; - -/** - * Item exporter to create simple AIPs for DSpace content. Currently exports - * individual items, or entire collections. For instructions on use, see - * printUsage() method. - *

- * ItemExport creates the simple AIP package that the importer also uses. It - * consists of: - *

- * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin - * core in RDF schema / contents - text file, listing one file per line / file1 - * - files contained in the item / file2 / ... - *

- * issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into - * {@code &}, etc.) - *

- * Modified by David Little, UCSD Libraries 12/21/04 to allow the registration - * of files (bitstreams) into DSpace. - * - * @author David Little - * @author Jay Paz - */ -public class ItemExportCLITool { - - protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance() - .getItemExportService(); - protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); - protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - - /** - * Default constructor - */ - private ItemExportCLITool() { } - - /* - * - */ - public static void main(String[] argv) throws Exception { - // create an options object and populate it - CommandLineParser parser = new DefaultParser(); - - Options options = new Options(); - - options.addOption("t", "type", true, "type: COLLECTION or ITEM"); - options.addOption("i", "id", true, "ID or handle of thing to export"); - options.addOption("d", "dest", true, - "destination where you want items to go"); - options.addOption("m", "migrate", false, - "export for migration (remove handle and metadata that will be re-created in new system)"); - options.addOption("n", "number", true, - "sequence number to begin exporting items with"); - options.addOption("z", "zip", true, "export as zip file (specify filename e.g. 
export.zip)"); - options.addOption("h", "help", false, "help"); - - // as pointed out by Peter Dietz this provides similar functionality to export metadata - // but it is needed since it directly exports to Simple Archive Format (SAF) - options.addOption("x", "exclude-bitstreams", false, "do not export bitstreams"); - - CommandLine line = parser.parse(options, argv); - - String typeString = null; - String destDirName = null; - String myIDString = null; - int seqStart = -1; - int myType = -1; - - Item myItem = null; - Collection mycollection = null; - - if (line.hasOption('h')) { - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("ItemExport\n", options); - System.out - .println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number"); - System.out - .println("singleitem: ItemExport -t ITEM -i ID -d dest -n number"); - - System.exit(0); - } - - if (line.hasOption('t')) { // type - typeString = line.getOptionValue('t'); - - if ("ITEM".equals(typeString)) { - myType = Constants.ITEM; - } else if ("COLLECTION".equals(typeString)) { - myType = Constants.COLLECTION; - } - } - - if (line.hasOption('i')) { // id - myIDString = line.getOptionValue('i'); - } - - if (line.hasOption('d')) { // dest - destDirName = line.getOptionValue('d'); - } - - if (line.hasOption('n')) { // number - seqStart = Integer.parseInt(line.getOptionValue('n')); - } - - boolean migrate = false; - if (line.hasOption('m')) { // number - migrate = true; - } - - boolean zip = false; - String zipFileName = ""; - if (line.hasOption('z')) { - zip = true; - zipFileName = line.getOptionValue('z'); - } - - boolean excludeBitstreams = false; - if (line.hasOption('x')) { - excludeBitstreams = true; - } - - // now validate the args - if (myType == -1) { - System.out - .println("type must be either COLLECTION or ITEM (-h for help)"); - System.exit(1); - } - - if (destDirName == null) { - System.out - .println("destination directory must be set (-h for help)"); - System.exit(1); - } - - 
if (seqStart == -1) { - System.out - .println("sequence start number must be set (-h for help)"); - System.exit(1); - } - - if (myIDString == null) { - System.out - .println("ID must be set to either a database ID or a handle (-h for help)"); - System.exit(1); - } - - Context c = new Context(Context.Mode.READ_ONLY); - c.turnOffAuthorisationSystem(); - - if (myType == Constants.ITEM) { - // first, is myIDString a handle? - if (myIDString.indexOf('/') != -1) { - myItem = (Item) handleService.resolveToObject(c, myIDString); - - if ((myItem == null) || (myItem.getType() != Constants.ITEM)) { - myItem = null; - } - } else { - myItem = itemService.find(c, UUID.fromString(myIDString)); - } - - if (myItem == null) { - System.out - .println("Error, item cannot be found: " + myIDString); - } - } else { - if (myIDString.indexOf('/') != -1) { - // has a / must be a handle - mycollection = (Collection) handleService.resolveToObject(c, - myIDString); - - // ensure it's a collection - if ((mycollection == null) - || (mycollection.getType() != Constants.COLLECTION)) { - mycollection = null; - } - } else if (myIDString != null) { - mycollection = collectionService.find(c, UUID.fromString(myIDString)); - } - - if (mycollection == null) { - System.out.println("Error, collection cannot be found: " - + myIDString); - System.exit(1); - } - } - - if (zip) { - Iterator items; - if (myItem != null) { - List myItems = new ArrayList<>(); - myItems.add(myItem); - items = myItems.iterator(); - } else { - System.out.println("Exporting from collection: " + myIDString); - items = itemService.findByCollection(c, mycollection); - } - itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams); - } else { - if (myItem != null) { - // it's only a single item - itemExportService - .exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate, - excludeBitstreams); - } else { - System.out.println("Exporting from collection: " + 
myIDString); - - // it's a collection, so do a bunch of items - Iterator i = itemService.findByCollection(c, mycollection); - itemExportService.exportItem(c, i, destDirName, seqStart, migrate, excludeBitstreams); - } - } - - c.complete(); - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java new file mode 100644 index 000000000000..b37df5f5ea59 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ItemExport} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("t").longOpt("type") + .desc("type: COLLECTION or ITEM") + .hasArg().required().build()); + options.addOption(Option.builder("i").longOpt("id") + .desc("ID or handle of thing to export") + .hasArg().required().build()); + options.addOption(Option.builder("n").longOpt("number") + .desc("sequence number to begin exporting items with") + 
.hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("migrate") + .desc("export for migration (remove handle and metadata that will be re-created in new system)") + .hasArg(false).required(false).build()); + + // as pointed out by Peter Dietz this provides similar functionality to export metadata + // but it is needed since it directly exports to Simple Archive Format (SAF) + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not export bitstreams") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java index 7d5e63c1274d..a884f9b07564 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportServiceImpl.java @@ -16,6 +16,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -51,11 +52,12 @@ import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.Utils; import org.dspace.eperson.EPerson; import org.dspace.eperson.service.EPersonService; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -63,17 +65,21 @@ * Item exporter to create simple AIPs for DSpace content. Currently exports * individual items, or entire collections. 
For instructions on use, see * printUsage() method. - *

+ *

* ItemExport creates the simple AIP package that the importer also uses. It * consists of: - *

- * /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin - * core in RDF schema / contents - text file, listing one file per line / file1 - * - files contained in the item / file2 / ... - *

+ *

{@code
+ * /exportdir/42/ (one directory per item)
+ *              / dublin_core.xml - qualified dublin core in RDF schema
+ *              / contents - text file, listing one file per line
+ *              / file1 - files contained in the item
+ *              / file2
+ *              / ...
+ * }
+ *

* issues -doesn't handle special characters in metadata (needs to turn {@code &'s} into * {@code &}, etc.) - *

+ *

* Modified by David Little, UCSD Libraries 12/21/04 to allow the registration * of files (bitstreams) into DSpace. * @@ -96,11 +102,12 @@ public class ItemExportServiceImpl implements ItemExportService { @Autowired(required = true) protected ConfigurationService configurationService; - /** * log4j logger */ - private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemExportServiceImpl.class); + private final Logger log = org.apache.logging.log4j.LogManager.getLogger(); + + private DSpaceRunnableHandler handler; protected ItemExportServiceImpl() { @@ -125,11 +132,11 @@ public void exportItem(Context c, Iterator i, } } - System.out.println("Beginning export"); + logInfo("Beginning export"); while (i.hasNext()) { if (SUBDIR_LIMIT > 0 && ++counter == SUBDIR_LIMIT) { - subdir = Integer.valueOf(subDirSuffix++).toString(); + subdir = Integer.toString(subDirSuffix++); fullPath = destDirName + File.separatorChar + subdir; counter = 0; @@ -138,7 +145,7 @@ public void exportItem(Context c, Iterator i, } } - System.out.println("Exporting item to " + mySequenceNumber); + logInfo("Exporting item to " + mySequenceNumber); Item item = i.next(); exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams); c.uncacheEntity(item); @@ -154,7 +161,7 @@ protected void exportItem(Context c, Item myItem, String destDirName, // now create a subdirectory File itemDir = new File(destDir + "/" + seqStart); - System.out.println("Exporting Item " + myItem.getID() + + logInfo("Exporting Item " + myItem.getID() + (myItem.getHandle() != null ? 
", handle " + myItem.getHandle() : "") + " to " + itemDir); @@ -167,6 +174,7 @@ protected void exportItem(Context c, Item myItem, String destDirName, // make it this far, now start exporting writeMetadata(c, myItem, itemDir, migrate); writeBitstreams(c, myItem, itemDir, excludeBitstreams); + writeCollections(myItem, itemDir); if (!migrate) { writeHandle(c, myItem, itemDir); } @@ -191,7 +199,7 @@ protected void exportItem(Context c, Item myItem, String destDirName, */ protected void writeMetadata(Context c, Item i, File destDir, boolean migrate) throws Exception { - Set schemas = new HashSet(); + Set schemas = new HashSet<>(); List dcValues = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY); for (MetadataValue metadataValue : dcValues) { schemas.add(metadataValue.getMetadataField().getMetadataSchema().getName()); @@ -224,7 +232,7 @@ protected void writeMetadata(Context c, String schema, Item i, File outFile = new File(destDir, filename); - System.out.println("Attempting to create file " + outFile); + logInfo("Attempting to create file " + outFile); if (outFile.createNewFile()) { BufferedOutputStream out = new BufferedOutputStream( @@ -267,7 +275,7 @@ protected void writeMetadata(Context c, String schema, Item i, + Utils.addEntities(dcv.getValue()) + "\n") .getBytes("UTF-8"); - if ((!migrate) || + if (!migrate || (migrate && !( ("date".equals(metadataField.getElement()) && "issued".equals(qualifier)) || ("date".equals(metadataField.getElement()) && "accessioned".equals(qualifier)) || @@ -292,10 +300,10 @@ protected void writeMetadata(Context c, String schema, Item i, } // When migrating, only keep date.issued if it is different to date.accessioned - if ((migrate) && + if (migrate && (dateIssued != null) && (dateAccessioned != null) && - (!dateIssued.equals(dateAccessioned))) { + !dateIssued.equals(dateAccessioned)) { utf8 = (" " + Utils.addEntities(dateIssued) + "\n") @@ -330,7 +338,7 @@ protected void writeHandle(Context c, Item i, File destDir) 
File outFile = new File(destDir, filename); if (outFile.createNewFile()) { - PrintWriter out = new PrintWriter(new FileWriter(outFile)); + PrintWriter out = new PrintWriter(new FileWriter(outFile, StandardCharsets.UTF_8)); out.println(i.getHandle()); @@ -342,6 +350,33 @@ protected void writeHandle(Context c, Item i, File destDir) } } + /** + * Create the 'collections' file. List handles of all Collections which + * contain this Item. The "owning" Collection is listed first. + * + * @param item list collections holding this Item. + * @param destDir write the file here. + * @throws IOException if the file cannot be created or written. + */ + protected void writeCollections(Item item, File destDir) + throws IOException { + File outFile = new File(destDir, "collections"); + if (outFile.createNewFile()) { + try (PrintWriter out = new PrintWriter(new FileWriter(outFile))) { + String ownerHandle = item.getOwningCollection().getHandle(); + out.println(ownerHandle); + for (Collection collection : item.getCollections()) { + String collectionHandle = collection.getHandle(); + if (!collectionHandle.equals(ownerHandle)) { + out.println(collectionHandle); + } + } + } + } else { + throw new IOException("Cannot create 'collections' in " + destDir); + } + } + /** * Create both the bitstreams and the contents file. Any bitstreams that * were originally registered will be marked in the contents file as such. 
@@ -360,7 +395,7 @@ protected void writeBitstreams(Context c, Item i, File destDir, File outFile = new File(destDir, "contents"); if (outFile.createNewFile()) { - PrintWriter out = new PrintWriter(new FileWriter(outFile)); + PrintWriter out = new PrintWriter(new FileWriter(outFile, StandardCharsets.UTF_8)); List bundles = i.getBundles(); @@ -398,7 +433,7 @@ protected void writeBitstreams(Context c, Item i, File destDir, File fdirs = new File(destDir + File.separator + dirs); if (!fdirs.exists() && !fdirs.mkdirs()) { - log.error("Unable to create destination directory"); + logError("Unable to create destination directory"); } } @@ -455,12 +490,12 @@ public void exportAsZip(Context context, Iterator items, File wkDir = new File(workDir); if (!wkDir.exists() && !wkDir.mkdirs()) { - log.error("Unable to create working direcory"); + logError("Unable to create working direcory"); } File dnDir = new File(destDirName); if (!dnDir.exists() && !dnDir.mkdirs()) { - log.error("Unable to create destination directory"); + logError("Unable to create destination directory"); } // export the items using normal export method @@ -474,7 +509,7 @@ public void exportAsZip(Context context, Iterator items, public void createDownloadableExport(DSpaceObject dso, Context context, boolean migrate) throws Exception { EPerson eperson = context.getCurrentUser(); - ArrayList list = new ArrayList(1); + ArrayList list = new ArrayList<>(1); list.add(dso); processDownloadableExport(list, context, eperson == null ? 
null : eperson.getEmail(), migrate); @@ -491,7 +526,7 @@ public void createDownloadableExport(List dsObjects, @Override public void createDownloadableExport(DSpaceObject dso, Context context, String additionalEmail, boolean migrate) throws Exception { - ArrayList list = new ArrayList(1); + ArrayList list = new ArrayList<>(1); list.add(dso); processDownloadableExport(list, context, additionalEmail, migrate); } @@ -629,11 +664,9 @@ protected void processDownloadableExport(List dsObjects, Thread go = new Thread() { @Override public void run() { - Context context = null; + Context context = new Context(); Iterator iitems = null; try { - // create a new dspace context - context = new Context(); // ignore auths context.turnOffAuthorisationSystem(); @@ -645,14 +678,14 @@ public void run() { String downloadDir = getExportDownloadDirectory(eperson); File dnDir = new File(downloadDir); if (!dnDir.exists() && !dnDir.mkdirs()) { - log.error("Unable to create download directory"); + logError("Unable to create download directory"); } Iterator iter = itemsMap.keySet().iterator(); while (iter.hasNext()) { String keyName = iter.next(); List uuids = itemsMap.get(keyName); - List items = new ArrayList(); + List items = new ArrayList<>(); for (UUID uuid : uuids) { items.add(itemService.find(context, uuid)); } @@ -664,7 +697,7 @@ public void run() { File wkDir = new File(workDir); if (!wkDir.exists() && !wkDir.mkdirs()) { - log.error("Unable to create working directory"); + logError("Unable to create working directory"); } @@ -755,7 +788,8 @@ public String getExportWorkDirectory() throws Exception { throw new Exception( "A dspace.cfg entry for 'org.dspace.app.itemexport.work.dir' does not exist."); } - return exportDir; + // clean work dir path from duplicate separators + return StringUtils.replace(exportDir, File.separator + File.separator, File.separator); } @Override @@ -876,14 +910,14 @@ public void deleteOldExportArchives(EPerson eperson) throws Exception { 
.getIntProperty("org.dspace.app.itemexport.life.span.hours"); Calendar now = Calendar.getInstance(); now.setTime(new Date()); - now.add(Calendar.HOUR, (-hours)); + now.add(Calendar.HOUR, -hours); File downloadDir = new File(getExportDownloadDirectory(eperson)); if (downloadDir.exists()) { File[] files = downloadDir.listFiles(); for (File file : files) { if (file.lastModified() < now.getTimeInMillis()) { if (!file.delete()) { - log.error("Unable to delete export file"); + logError("Unable to delete export file"); } } } @@ -896,7 +930,7 @@ public void deleteOldExportArchives() throws Exception { int hours = configurationService.getIntProperty("org.dspace.app.itemexport.life.span.hours"); Calendar now = Calendar.getInstance(); now.setTime(new Date()); - now.add(Calendar.HOUR, (-hours)); + now.add(Calendar.HOUR, -hours); File downloadDir = new File(configurationService.getProperty("org.dspace.app.itemexport.download.dir")); if (downloadDir.exists()) { // Get a list of all the sub-directories, potentially one for each ePerson. @@ -907,7 +941,7 @@ public void deleteOldExportArchives() throws Exception { for (File file : files) { if (file.lastModified() < now.getTimeInMillis()) { if (!file.delete()) { - log.error("Unable to delete old files"); + logError("Unable to delete old files"); } } } @@ -915,7 +949,7 @@ public void deleteOldExportArchives() throws Exception { // If the directory is now empty then we delete it too. 
if (dir.listFiles().length == 0) { if (!dir.delete()) { - log.error("Unable to delete directory"); + logError("Unable to delete directory"); } } } @@ -936,14 +970,14 @@ public void emailSuccessMessage(Context context, EPerson eperson, email.send(); } catch (Exception e) { - log.warn(LogManager.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e); + logWarn(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of export"), e); } } @Override public void emailErrorMessage(EPerson eperson, String error) throws MessagingException { - log.warn("An error occurred during item export, the user will be notified. " + error); + logWarn("An error occurred during item export, the user will be notified. " + error); try { Locale supportedLocale = I18nUtil.getEPersonLocale(eperson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "export_error")); @@ -953,7 +987,7 @@ public void emailErrorMessage(EPerson eperson, String error) email.send(); } catch (Exception e) { - log.warn("error during item export error notification", e); + logWarn("error during item export error notification", e); } } @@ -968,7 +1002,7 @@ public void zip(String strSource, String target) throws Exception { } File targetFile = new File(tempFileName); if (!targetFile.createNewFile()) { - log.warn("Target file already exists: " + targetFile.getName()); + logWarn("Target file already exists: " + targetFile.getName()); } FileOutputStream fos = new FileOutputStream(tempFileName); @@ -984,7 +1018,7 @@ public void zip(String strSource, String target) throws Exception { deleteDirectory(cpFile); if (!targetFile.renameTo(new File(target))) { - log.error("Unable to rename file"); + logError("Unable to rename file"); } } finally { if (cpZipOutputStream != null) { @@ -1017,8 +1051,11 @@ protected void zipFiles(File cpFile, String strSource, return; } String strAbsPath = cpFile.getPath(); - String strZipEntryName = strAbsPath.substring(strSource - .length() + 
1, strAbsPath.length()); + int startIndex = strSource.length(); + if (!StringUtils.endsWith(strSource, File.separator)) { + startIndex++; + } + String strZipEntryName = strAbsPath.substring(startIndex, strAbsPath.length()); // byte[] b = new byte[ (int)(cpFile.length()) ]; @@ -1057,7 +1094,7 @@ protected boolean deleteDirectory(File path) { deleteDirectory(file); } else { if (!file.delete()) { - log.error("Unable to delete file: " + file.getName()); + logError("Unable to delete file: " + file.getName()); } } } @@ -1066,4 +1103,64 @@ protected boolean deleteDirectory(File path) { return (path.delete()); } + @Override + public void setHandler(DSpaceRunnableHandler handler) { + this.handler = handler; + } + + private void logInfo(String message) { + logInfo(message, null); + } + + private void logInfo(String message, Exception e) { + if (handler != null) { + handler.logInfo(message); + return; + } + + if (e != null) { + log.info(message, e); + } else { + log.info(message); + } + } + + private void logWarn(String message) { + logWarn(message, null); + } + + private void logWarn(String message, Exception e) { + if (handler != null) { + handler.logWarning(message); + return; + } + + if (e != null) { + log.warn(message, e); + } else { + log.warn(message); + } + } + + private void logError(String message) { + logError(message, null); + } + + private void logError(String message, Exception e) { + if (handler != null) { + if (e != null) { + handler.logError(message, e); + } else { + handler.logError(message); + } + return; + } + + if (e != null) { + log.error(message, e); + } else { + log.error(message); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java b/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java index 7dedc9950b4f..6ec1027709bb 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java +++ 
b/dspace-api/src/main/java/org/dspace/app/itemexport/service/ItemExportService.java @@ -17,6 +17,7 @@ import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * Item exporter to create simple AIPs for DSpace content. Currently exports @@ -267,4 +268,10 @@ public void emailErrorMessage(EPerson eperson, String error) */ public void zip(String strSource, String target) throws Exception; + /** + * Set the DSpace Runnable Handler + * @param handler + */ + public void setHandler(DSpaceRunnableHandler handler); + } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java new file mode 100644 index 000000000000..b32de11f7a7f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -0,0 +1,440 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Files; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; +import org.dspace.app.itemimport.factory.ItemImportServiceFactory; +import org.dspace.app.itemimport.service.ItemImportService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Collection; +import org.dspace.content.factory.ContentServiceFactory; +import 
org.dspace.content.service.CollectionService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; + +/** + * Import items into DSpace. The conventional use is upload files by copying + * them. DSpace writes the item's bitstreams into its assetstore. Metadata is + * also loaded to the DSpace database. + *

+ * A second use assumes the bitstream files already exist in a storage + * resource accessible to DSpace. In this case the bitstreams are 'registered'. + * That is, the metadata is loaded to the DSpace database and DSpace is given + * the location of the file which is subsumed into DSpace. + *

+ * The distinction is controlled by the format of lines in the 'contents' file. + * See comments in processContentsFile() below. + *

+ * Modified by David Little, UCSD Libraries 12/21/04 to + * allow the registration of files (bitstreams) into DSpace. + */ +public class ItemImport extends DSpaceRunnable { + + public static String TEMP_DIR = "importSAF"; + public static String MAPFILE_FILENAME = "mapfile"; + public static String MAPFILE_BITSTREAM_TYPE = "importSAFMapfile"; + + protected boolean template = false; + protected String command = null; + protected String sourcedir = null; + protected String mapfile = null; + protected String eperson = null; + protected String[] collections = null; + protected boolean isTest = false; + protected boolean isExcludeContent = false; + protected boolean isResume = false; + protected boolean useWorkflow = false; + protected boolean useWorkflowSendEmail = false; + protected boolean isQuiet = false; + protected boolean commandLineCollections = false; + protected boolean zip = false; + protected boolean remoteUrl = false; + protected String zipfilename = null; + protected boolean zipvalid = false; + protected boolean help = false; + protected File workDir = null; + protected File workFile = null; + + protected static final CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + protected static final EPersonService epersonService = + EPersonServiceFactory.getInstance().getEPersonService(); + protected static final HandleService handleService = + HandleServiceFactory.getInstance().getHandleService(); + + @Override + public ItemImportScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("import", ItemImportScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + help = commandLine.hasOption('h'); + + if (commandLine.hasOption('a')) { + command = "add"; + } + + if (commandLine.hasOption('r')) { + command = "replace"; + } + + if (commandLine.hasOption('d')) { + command = "delete"; + } + + if (commandLine.hasOption('w')) { + 
useWorkflow = true; + if (commandLine.hasOption('n')) { + useWorkflowSendEmail = true; + } + } + + if (commandLine.hasOption('v')) { + isTest = true; + handler.logInfo("**Test Run** - not actually importing items."); + } + + isExcludeContent = commandLine.hasOption('x'); + + if (commandLine.hasOption('p')) { + template = true; + } + + if (commandLine.hasOption('c')) { // collections + collections = commandLine.getOptionValues('c'); + commandLineCollections = true; + } else { + handler.logInfo("No collections given. Assuming 'collections' file inside item directory"); + } + + if (commandLine.hasOption('R')) { + isResume = true; + handler.logInfo("**Resume import** - attempting to import items not already imported"); + } + + if (commandLine.hasOption('q')) { + isQuiet = true; + } + + setZip(); + } + + @Override + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + + Date startTime = new Date(); + Context context = new Context(Context.Mode.BATCH_EDIT); + + setMapFile(); + + validate(context); + + setEPerson(context); + + // check collection + List mycollections = null; + // don't need to validate collections set if command is "delete" + // also if no collections are given in the command line + if (!"delete".equals(command) && commandLineCollections) { + handler.logInfo("Destination collections:"); + + mycollections = new ArrayList<>(); + + // validate each collection arg to see if it's a real collection + for (int i = 0; i < collections.length; i++) { + Collection collection = null; + if (collections[i] != null) { + // is the ID a handle? 
+ if (collections[i].indexOf('/') != -1) { + // string has a / so it must be a handle - try and resolve + // it + collection = ((Collection) handleService + .resolveToObject(context, collections[i])); + } else { + // not a handle, try and treat it as an integer collection database ID + collection = collectionService.find(context, UUID.fromString(collections[i])); + } + } + + // was the collection valid? + if (collection == null + || collection.getType() != Constants.COLLECTION) { + throw new IllegalArgumentException("Cannot resolve " + + collections[i] + " to collection"); + } + + // add resolved collection to list + mycollections.add(collection); + + // print progress info + handler.logInfo((i == 0 ? "Owning " : "") + "Collection: " + collection.getName()); + } + } + // end validation + + // start + ItemImportService itemImportService = ItemImportServiceFactory.getInstance() + .getItemImportService(); + try { + itemImportService.setTest(isTest); + itemImportService.setExcludeContent(isExcludeContent); + itemImportService.setResume(isResume); + itemImportService.setUseWorkflow(useWorkflow); + itemImportService.setUseWorkflowSendEmail(useWorkflowSendEmail); + itemImportService.setQuiet(isQuiet); + itemImportService.setHandler(handler); + + try { + context.turnOffAuthorisationSystem(); + + readZip(context, itemImportService); + + process(context, itemImportService, mycollections); + + // complete all transactions + context.complete(); + } catch (Exception e) { + context.abort(); + throw new Exception( + "Error committing changes to database: " + e.getMessage() + ", aborting most recent changes", e); + } + + if (isTest) { + handler.logInfo("***End of Test Run***"); + } + } finally { + if (zip) { + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); 
+ } + + // conditionally clean workFile if import was done in the UI or via a URL and it still exists + if (workFile != null && workFile.exists()) { + workFile.delete(); + } + } + + Date endTime = new Date(); + handler.logInfo("Started: " + startTime.getTime()); + handler.logInfo("Ended: " + endTime.getTime()); + handler.logInfo( + "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime + .getTime() - startTime.getTime()) + " msecs)"); + } + } + + /** + * Validate the options + * @param context + */ + protected void validate(Context context) { + // check zip type: uploaded file or remote url + if (commandLine.hasOption('z')) { + zipfilename = commandLine.getOptionValue('z'); + } else if (commandLine.hasOption('u')) { + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } + if (StringUtils.isBlank(zipfilename)) { + throw new UnsupportedOperationException("Must run with either name of zip file or url of zip file"); + } + + if (command == null) { + handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); + throw new UnsupportedOperationException("Must run with either add, replace, or remove"); + } + + // can only resume for adds + if (isResume && !"add".equals(command)) { + handler.logError("Resume option only works with the --add command (run with -h flag for details)"); + throw new UnsupportedOperationException("Resume option only works with the --add command"); + } + + if (isResume && StringUtils.isBlank(mapfile)) { + handler.logError("The mapfile does not exist. 
"); + throw new UnsupportedOperationException("The mapfile does not exist"); + } + } + + /** + * Process the import + * @param context + * @param itemImportService + * @param collections + * @throws Exception + */ + protected void process(Context context, ItemImportService itemImportService, + List collections) throws Exception { + readMapfile(context); + + if ("add".equals(command)) { + itemImportService.addItems(context, collections, sourcedir, mapfile, template); + } else if ("replace".equals(command)) { + itemImportService.replaceItems(context, collections, sourcedir, mapfile, template); + } else if ("delete".equals(command)) { + itemImportService.deleteItems(context, mapfile); + } + + // write input stream on handler + File mapFile = new File(mapfile); + try (InputStream mapfileInputStream = new FileInputStream(mapFile)) { + handler.writeFilestream(context, MAPFILE_FILENAME, mapfileInputStream, MAPFILE_BITSTREAM_TYPE); + } finally { + mapFile.delete(); + } + } + + /** + * Read the ZIP archive in SAF format + * @param context + * @param itemImportService + * @throws Exception + */ + protected void readZip(Context context, ItemImportService itemImportService) throws Exception { + Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); + if (!remoteUrl) { + // manage zip via upload + optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); + } else { + // manage zip via remote url + optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + } + + if (validationFileStream.isPresent()) { + // validate zip file + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + + workFile = new File(itemImportService.getTempWorkDir() + File.separator + + zipfilename + "-" + context.getCurrentUser().getID()); + 
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + } else { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } + + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } + } + + /** + * Read the mapfile + * @param context + */ + protected void readMapfile(Context context) { + if (isResume) { + try { + Optional optionalFileStream = handler.getFileStream(context, mapfile); + if (optionalFileStream.isPresent()) { + File tempFile = File.createTempFile(mapfile, "temp"); + tempFile.deleteOnExit(); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), tempFile); + mapfile = tempFile.getAbsolutePath(); + } + } catch (IOException | AuthorizeException e) { + throw new UnsupportedOperationException("The mapfile does not exist"); + } + } + } + + /** + * Set the mapfile option + * @throws IOException + */ + protected void setMapFile() throws IOException { + if (isResume && commandLine.hasOption('m')) { + mapfile = commandLine.getOptionValue('m'); + } else { + mapfile = Files.createTempFile(MAPFILE_FILENAME, "temp").toString(); + } + } + + /** + * Set the zip option + */ + protected void setZip() { + zip = 
true; + } + + /** + * Set the eperson in the context + * @param context + * @throws SQLException + */ + protected void setEPerson(Context context) throws SQLException { + EPerson myEPerson = epersonService.find(context, this.getEpersonIdentifier()); + + // check eperson + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + this.getEpersonIdentifier()); + throw new UnsupportedOperationException("EPerson cannot be found: " + this.getEpersonIdentifier()); + } + + context.setCurrentUser(myEPerson); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java new file mode 100644 index 000000000000..98d2469b7155 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -0,0 +1,187 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.net.URL; +import java.sql.SQLException; +import java.util.List; +import java.util.Optional; +import java.util.UUID; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.itemimport.service.ItemImportService; +import org.dspace.content.Collection; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; + +/** + * CLI variant for the {@link ItemImport} class. + * This was done to specify the specific behaviors for the CLI. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportCLI extends ItemImport { + + @Override + protected void validate(Context context) { + // can only resume for adds + if (isResume && !"add".equals(command)) { + handler.logError("Resume option only works with the --add command (run with -h flag for details)"); + throw new UnsupportedOperationException("Resume option only works with the --add command"); + } + + if (commandLine.hasOption('e')) { + eperson = commandLine.getOptionValue('e'); + } + + // check eperson identifier (email or id) + if (eperson == null) { + handler.logError("An eperson to do the importing must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("An eperson to do the importing must be specified"); + } + + File myFile = null; + try { + myFile = new File(mapfile); + } catch (Exception e) { + throw new UnsupportedOperationException("The mapfile " + mapfile + " does not exist"); + } + + if (!isResume && "add".equals(command) && myFile.exists()) { + handler.logError("The mapfile " + mapfile + " already exists. " + + "Either delete it or use --resume if attempting to resume an aborted import. 
" + + "(run with -h flag for details)"); + throw new UnsupportedOperationException("The mapfile " + mapfile + " already exists"); + } + + if (command == null) { + handler.logError("Must run with either add, replace, or remove (run with -h flag for details)"); + throw new UnsupportedOperationException("Must run with either add, replace, or remove"); + } else if ("add".equals(command) || "replace".equals(command)) { + if (!remoteUrl && sourcedir == null) { + handler.logError("A source directory containing items must be set (run with -h flag for details)"); + throw new UnsupportedOperationException("A source directory containing items must be set"); + } + + if (mapfile == null) { + handler.logError( + "A map file to hold importing results must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("A map file to hold importing results must be specified"); + } + } else if ("delete".equals(command)) { + if (mapfile == null) { + handler.logError("A map file must be specified (run with -h flag for details)"); + throw new UnsupportedOperationException("A map file must be specified"); + } + } + } + + @Override + protected void process(Context context, ItemImportService itemImportService, + List collections) throws Exception { + if ("add".equals(command)) { + itemImportService.addItems(context, collections, sourcedir, mapfile, template); + } else if ("replace".equals(command)) { + itemImportService.replaceItems(context, collections, sourcedir, mapfile, template); + } else if ("delete".equals(command)) { + itemImportService.deleteItems(context, mapfile); + } + } + + @Override + protected void readZip(Context context, ItemImportService itemImportService) throws Exception { + // If this is a zip archive, unzip it first + if (zip) { + if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + 
"Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip( + new File(sourcedir + File.separator + zipfilename), workDir.getAbsolutePath()); + } else { + // manage zip via remote url + Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + + workFile = new File(itemImportService.getTempWorkDir() + File.separator + + zipfilename + "-" + context.getCurrentUser().getID()); + FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); + } else { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + } + } + } + + @Override + protected void setMapFile() { + if (commandLine.hasOption('m')) { + mapfile = commandLine.getOptionValue('m'); + } + } + + @Override + protected void setZip() { + if (commandLine.hasOption('s')) { // source + sourcedir = commandLine.getOptionValue('s'); + } + + if (commandLine.hasOption('z')) { + zip = true; + zipfilename = commandLine.getOptionValue('z'); + } + + if (commandLine.hasOption('u')) { // remote url + zip = true; + remoteUrl = true; + zipfilename = commandLine.getOptionValue('u'); + } + } + + @Override + protected void setEPerson(Context 
context) throws SQLException { + EPerson myEPerson = null; + if (StringUtils.contains(eperson, '@')) { + // @ sign, must be an email + myEPerson = epersonService.findByEmail(context, eperson); + } else { + myEPerson = epersonService.find(context, UUID.fromString(eperson)); + } + + // check eperson + if (myEPerson == null) { + handler.logError("EPerson cannot be found: " + eperson + " (run with -h flag for details)"); + throw new UnsupportedOperationException("EPerson cannot be found: " + eperson); + } + + context.setCurrentUser(myEPerson); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java new file mode 100644 index 000000000000..89abd7155b39 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLIScriptConfiguration.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ItemImportCLI} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportCLIScriptConfiguration extends ItemImportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("a").longOpt("add") + .desc("add items to DSpace") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("r").longOpt("replace") + .desc("replace items in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("d").longOpt("delete") + 
.desc("delete items listed in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("s").longOpt("source") + .desc("source of items (directory)") + .hasArg().required(false).build()); + options.addOption(Option.builder("z").longOpt("zip") + .desc("name of zip file") + .hasArg().required(false).build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); + options.addOption(Option.builder("c").longOpt("collection") + .desc("destination collection(s) Handle or database ID") + .hasArg().required(false).build()); + options.addOption(Option.builder("m").longOpt("mapfile") + .desc("mapfile items in mapfile") + .hasArg().required().build()); + options.addOption(Option.builder("e").longOpt("eperson") + .desc("email of eperson doing importing") + .hasArg().required().build()); + options.addOption(Option.builder("w").longOpt("workflow") + .desc("send submission through collection's workflow") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("n").longOpt("notify") + .desc("if sending submissions through the workflow, send notification emails") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("v").longOpt("validate") + .desc("test run - do not actually import items") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not load or expect content bitstreams") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("p").longOpt("template") + .desc("apply template") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("R").longOpt("resume") + .desc("resume a failed import (add only)") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("q").longOpt("quiet") + .desc("don't display metadata") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + 
.desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLITool.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLITool.java deleted file mode 100644 index 7cad97df3171..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLITool.java +++ /dev/null @@ -1,429 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.itemimport; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Options; -import org.dspace.app.itemimport.factory.ItemImportServiceFactory; -import org.dspace.app.itemimport.service.ItemImportService; -import org.dspace.content.Collection; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.EPersonService; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; - -/** - * Import items into DSpace. The conventional use is upload files by copying - * them. DSpace writes the item's bitstreams into its assetstore. Metadata is - * also loaded to the DSpace database. - *

- * A second use assumes the bitstream files already exist in a storage - * resource accessible to DSpace. In this case the bitstreams are 'registered'. - * That is, the metadata is loaded to the DSpace database and DSpace is given - * the location of the file which is subsumed into DSpace. - *

- * The distinction is controlled by the format of lines in the 'contents' file. - * See comments in processContentsFile() below. - *

- * Modified by David Little, UCSD Libraries 12/21/04 to - * allow the registration of files (bitstreams) into DSpace. - */ -public class ItemImportCLITool { - - private static boolean template = false; - - private static final CollectionService collectionService = ContentServiceFactory.getInstance() - .getCollectionService(); - private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService(); - private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); - - /** - * Default constructor - */ - private ItemImportCLITool() { } - - public static void main(String[] argv) throws Exception { - Date startTime = new Date(); - int status = 0; - - try { - // create an options object and populate it - CommandLineParser parser = new DefaultParser(); - - Options options = new Options(); - - options.addOption("a", "add", false, "add items to DSpace"); - options.addOption("r", "replace", false, "replace items in mapfile"); - options.addOption("d", "delete", false, - "delete items listed in mapfile"); - options.addOption("i", "inputtype", true, "input type in case of BTE import"); - options.addOption("s", "source", true, "source of items (directory)"); - options.addOption("z", "zip", true, "name of zip file"); - options.addOption("c", "collection", true, - "destination collection(s) Handle or database ID"); - options.addOption("m", "mapfile", true, "mapfile items in mapfile"); - options.addOption("e", "eperson", true, - "email of eperson doing importing"); - options.addOption("w", "workflow", false, - "send submission through collection's workflow"); - options.addOption("n", "notify", false, - "if sending submissions through the workflow, send notification emails"); - options.addOption("t", "test", false, - "test run - do not actually import items"); - options.addOption("p", "template", false, "apply template"); - options.addOption("R", "resume", false, - "resume a failed import (add only)"); 
- options.addOption("q", "quiet", false, "don't display metadata"); - - options.addOption("h", "help", false, "help"); - - CommandLine line = parser.parse(options, argv); - - String command = null; // add replace remove, etc - String bteInputType = null; //ris, endnote, tsv, csv, bibtex - String sourcedir = null; - String mapfile = null; - String eperson = null; // db ID or email - String[] collections = null; // db ID or handles - boolean isTest = false; - boolean isResume = false; - boolean useWorkflow = false; - boolean useWorkflowSendEmail = false; - boolean isQuiet = false; - - if (line.hasOption('h')) { - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("ItemImport\n", options); - System.out - .println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile"); - System.out - .println( - "\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z " + - "filename.zip -m mapfile"); - System.out - .println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile"); - System.out - .println("deleting items: ItemImport -d -e eperson -m mapfile"); - System.out - .println( - "If multiple collections are specified, the first collection will be the one that owns the " + - "item."); - - System.exit(0); - } - - if (line.hasOption('a')) { - command = "add"; - } - - if (line.hasOption('r')) { - command = "replace"; - } - - if (line.hasOption('d')) { - command = "delete"; - } - - if (line.hasOption('b')) { - command = "add-bte"; - } - - if (line.hasOption('i')) { - bteInputType = line.getOptionValue('i'); - } - - if (line.hasOption('w')) { - useWorkflow = true; - if (line.hasOption('n')) { - useWorkflowSendEmail = true; - } - } - - if (line.hasOption('t')) { - isTest = true; - System.out.println("**Test Run** - not actually importing items."); - } - - if (line.hasOption('p')) { - template = true; - } - - if (line.hasOption('s')) { // source - sourcedir = line.getOptionValue('s'); - } - - 
if (line.hasOption('m')) { // mapfile - mapfile = line.getOptionValue('m'); - } - - if (line.hasOption('e')) { // eperson - eperson = line.getOptionValue('e'); - } - - if (line.hasOption('c')) { // collections - collections = line.getOptionValues('c'); - } - - if (line.hasOption('R')) { - isResume = true; - System.out - .println("**Resume import** - attempting to import items not already imported"); - } - - if (line.hasOption('q')) { - isQuiet = true; - } - - boolean zip = false; - String zipfilename = ""; - if (line.hasOption('z')) { - zip = true; - zipfilename = line.getOptionValue('z'); - } - - //By default assume collections will be given on the command line - boolean commandLineCollections = true; - // now validate - // must have a command set - if (command == null) { - System.out - .println("Error - must run with either add, replace, or remove (run with -h flag for details)"); - System.exit(1); - } else if ("add".equals(command) || "replace".equals(command)) { - if (sourcedir == null) { - System.out - .println("Error - a source directory containing items must be set"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (mapfile == null) { - System.out - .println("Error - a map file to hold importing results must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (eperson == null) { - System.out - .println("Error - an eperson to do the importing must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (collections == null) { - System.out.println("No collections given. 
Assuming 'collections' file inside item directory"); - commandLineCollections = false; - } - } else if ("add-bte".equals(command)) { - //Source dir can be null, the user can specify the parameters for his loader in the Spring XML - // configuration file - - if (mapfile == null) { - System.out - .println("Error - a map file to hold importing results must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (eperson == null) { - System.out - .println("Error - an eperson to do the importing must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - - if (collections == null) { - System.out.println("No collections given. Assuming 'collections' file inside item directory"); - commandLineCollections = false; - } - - if (bteInputType == null) { - System.out - .println( - "Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have " + - "specified in BTE Spring XML configuration file) must be specified"); - System.out.println(" (run with -h flag for details)"); - System.exit(1); - } - } else if ("delete".equals(command)) { - if (eperson == null) { - System.out - .println("Error - an eperson to do the importing must be specified"); - System.exit(1); - } - - if (mapfile == null) { - System.out.println("Error - a map file must be specified"); - System.exit(1); - } - } - - // can only resume for adds - if (isResume && !"add".equals(command) && !"add-bte".equals(command)) { - System.out - .println("Error - resume option only works with the --add or the --add-bte commands"); - System.exit(1); - } - - // do checks around mapfile - if mapfile exists and 'add' is selected, - // resume must be chosen - File myFile = new File(mapfile); - - if (!isResume && "add".equals(command) && myFile.exists()) { - System.out.println("Error - the mapfile " + mapfile - + " already exists."); - System.out - .println("Either delete it or use --resume if attempting to resume an aborted 
import."); - System.exit(1); - } - - ItemImportService myloader = ItemImportServiceFactory.getInstance().getItemImportService(); - myloader.setTest(isTest); - myloader.setResume(isResume); - myloader.setUseWorkflow(useWorkflow); - myloader.setUseWorkflowSendEmail(useWorkflowSendEmail); - myloader.setQuiet(isQuiet); - - // create a context - Context c = new Context(Context.Mode.BATCH_EDIT); - - // find the EPerson, assign to context - EPerson myEPerson = null; - - if (eperson.indexOf('@') != -1) { - // @ sign, must be an email - myEPerson = epersonService.findByEmail(c, eperson); - } else { - myEPerson = epersonService.find(c, UUID.fromString(eperson)); - } - - if (myEPerson == null) { - System.out.println("Error, eperson cannot be found: " + eperson); - System.exit(1); - } - - c.setCurrentUser(myEPerson); - - // find collections - List mycollections = null; - - // don't need to validate collections set if command is "delete" - // also if no collections are given in the command line - if (!"delete".equals(command) && commandLineCollections) { - System.out.println("Destination collections:"); - - mycollections = new ArrayList<>(); - - // validate each collection arg to see if it's a real collection - for (int i = 0; i < collections.length; i++) { - // is the ID a handle? - if (collections[i].indexOf('/') != -1) { - // string has a / so it must be a handle - try and resolve - // it - mycollections.add((Collection) handleService - .resolveToObject(c, collections[i])); - - // resolved, now make sure it's a collection - if ((mycollections.get(i) == null) - || (mycollections.get(i).getType() != Constants.COLLECTION)) { - mycollections.set(i, null); - } - } else if (collections[i] != null) { - // not a handle, try and treat it as an integer collection database ID - mycollections.set(i, collectionService.find(c, UUID.fromString(collections[i]))); - } - - // was the collection valid? 
- if (mycollections.get(i) == null) { - throw new IllegalArgumentException("Cannot resolve " - + collections[i] + " to collection"); - } - - // print progress info - String owningPrefix = ""; - - if (i == 0) { - owningPrefix = "Owning "; - } - - System.out.println(owningPrefix + " Collection: " - + mycollections.get(i).getName()); - } - } // end of validating collections - - try { - // If this is a zip archive, unzip it first - if (zip) { - sourcedir = myloader.unzip(sourcedir, zipfilename); - } - - - c.turnOffAuthorisationSystem(); - - if ("add".equals(command)) { - myloader.addItems(c, mycollections, sourcedir, mapfile, template); - } else if ("replace".equals(command)) { - myloader.replaceItems(c, mycollections, sourcedir, mapfile, template); - } else if ("delete".equals(command)) { - myloader.deleteItems(c, mapfile); - } - - // complete all transactions - c.complete(); - } catch (Exception e) { - c.abort(); - e.printStackTrace(); - System.out.println(e); - status = 1; - } - - // Delete the unzipped file - try { - if (zip) { - System.gc(); - System.out.println( - "Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath()); - myloader.cleanupZipTemp(); - } - } catch (IOException ex) { - System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile() - .getAbsolutePath()); - } - - - if (isTest) { - System.out.println("***End of Test Run***"); - } - } finally { - Date endTime = new Date(); - System.out.println("Started: " + startTime.getTime()); - System.out.println("Ended: " + endTime.getTime()); - System.out.println( - "Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime - .getTime() - startTime.getTime()) + " msecs)"); - } - - System.exit(status); - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java new file mode 
100644 index 000000000000..3f2675ea58f1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import java.io.InputStream; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link ItemImport} script + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + Options options = new Options(); + + options.addOption(Option.builder("a").longOpt("add") + .desc("add items to DSpace") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("r").longOpt("replace") + .desc("replace items in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("d").longOpt("delete") + .desc("delete items listed in mapfile") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("z").longOpt("zip") + .desc("name of zip file") + .type(InputStream.class) + .hasArg().build()); + options.addOption(Option.builder("u").longOpt("url") + .desc("url of zip file") + .hasArg().build()); + options.addOption(Option.builder("c").longOpt("collection") + .desc("destination collection(s) Handle or database ID") + .hasArg().required(false).build()); + 
options.addOption(Option.builder("m").longOpt("mapfile") + .desc("mapfile items in mapfile") + .type(InputStream.class) + .hasArg().required(false).build()); + options.addOption(Option.builder("w").longOpt("workflow") + .desc("send submission through collection's workflow") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("n").longOpt("notify") + .desc("if sending submissions through the workflow, send notification emails") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("v").longOpt("validate") + .desc("test run - do not actually import items") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("x").longOpt("exclude-bitstreams") + .desc("do not load or expect content bitstreams") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("p").longOpt("template") + .desc("apply template") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("R").longOpt("resume") + .desc("resume a failed import (add only)") + .hasArg(false).required(false).build()); + options.addOption(Option.builder("q").longOpt("quiet") + .desc("don't display metadata") + .hasArg(false).required(false).build()); + + options.addOption(Option.builder("h").longOpt("help") + .desc("help") + .hasArg(false).required(false).build()); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java index c89b2d07238e..255f4bdcbb15 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportServiceImpl.java @@ -7,6 +7,13 @@ */ package org.dspace.app.itemimport; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_HEIGHT_QUALIFIER; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_IMAGE_ELEMENT; +import static 
org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_LABEL_ELEMENT; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_SCHEMA; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_TOC_ELEMENT; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_WIDTH_QUALIFIER; + import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; @@ -44,6 +51,10 @@ import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.collections4.ComparatorUtils; import org.apache.commons.io.FileDeleteStrategy; @@ -51,10 +62,11 @@ import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.xpath.XPathAPI; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.app.util.LocalSchemaFilenameFilter; +import org.dspace.app.util.RelationshipUtils; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; @@ -68,6 +80,9 @@ import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.MetadataValue; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; import org.dspace.content.WorkspaceItem; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; @@ -77,17 +92,21 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; 
import org.dspace.content.service.MetadataSchemaService; +import org.dspace.content.service.MetadataValueService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowService; @@ -117,7 +136,9 @@ * allow the registration of files (bitstreams) into DSpace. */ public class ItemImportServiceImpl implements ItemImportService, InitializingBean { - private final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemImportServiceImpl.class); + private final Logger log = LogManager.getLogger(); + + private DSpaceRunnableHandler handler; @Autowired(required = true) protected AuthorizeService authorizeService; @@ -151,15 +172,25 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea protected WorkflowService workflowService; @Autowired(required = true) protected ConfigurationService configurationService; + @Autowired(required = true) + protected RelationshipService relationshipService; + @Autowired(required = true) + protected RelationshipTypeService relationshipTypeService; + @Autowired(required = true) + protected MetadataValueService metadataValueService; protected String tempWorkDir; protected boolean isTest = false; + protected boolean isExcludeContent = false; protected boolean isResume = false; protected boolean useWorkflow = 
false; protected boolean useWorkflowSendEmail = false; protected boolean isQuiet = false; + //remember which folder item was imported from + Map itemFolderMap = null; + @Override public void afterPropertiesSet() throws Exception { tempWorkDir = configurationService.getProperty("org.dspace.app.batchitemimport.work.dir"); @@ -168,11 +199,13 @@ public void afterPropertiesSet() throws Exception { if (!tempWorkDirFile.exists()) { boolean success = tempWorkDirFile.mkdir(); if (success) { - log.info("Created org.dspace.app.batchitemimport.work.dir of: " + tempWorkDir); + logInfo("Created org.dspace.app.batchitemimport.work.dir of: " + tempWorkDir); } else { - log.error("Cannot create batch import directory! " + tempWorkDir); + logError("Cannot create batch import directory! " + tempWorkDir); } } + // clean work dir path from duplicate separators + tempWorkDir = StringUtils.replace(tempWorkDir, File.separator + File.separator, File.separator); } // File listing filter to look for metadata files @@ -198,9 +231,9 @@ public void addItemsAtomic(Context c, List mycollections, String sou try { addItems(c, mycollections, sourceDir, mapFile, template); } catch (Exception addException) { - log.error("AddItems encountered an error, will try to revert. Error: " + addException.getMessage()); + logError("AddItems encountered an error, will try to revert. 
Error: " + addException.getMessage()); deleteItems(c, mapFile); - log.info("Attempted to delete partial (errored) import"); + logInfo("Attempted to delete partial (errored) import"); throw addException; } } @@ -211,14 +244,15 @@ public void addItems(Context c, List mycollections, // create the mapfile File outFile = null; PrintWriter mapOut = null; + try { Map skipItems = new HashMap<>(); // set of items to skip if in 'resume' // mode - System.out.println("Adding items from directory: " + sourceDir); - log.debug("Adding items from directory: " + sourceDir); - System.out.println("Generating mapfile: " + mapFile); - log.debug("Generating mapfile: " + mapFile); + itemFolderMap = new HashMap<>(); + + logDebug("Adding items from directory: " + sourceDir); + logDebug("Generating mapfile: " + mapFile); boolean directoryFileCollections = false; if (mycollections == null) { @@ -235,16 +269,12 @@ public void addItems(Context c, List mycollections, // sneaky isResume == true means open file in append mode outFile = new File(mapFile); mapOut = new PrintWriter(new FileWriter(outFile, isResume)); - - if (mapOut == null) { - throw new Exception("can't open mapfile: " + mapFile); - } } // open and process the source directory File d = new java.io.File(sourceDir); - if (d == null || !d.isDirectory()) { + if (!d.isDirectory()) { throw new Exception("Error, cannot open source directory " + sourceDir); } @@ -254,7 +284,13 @@ public void addItems(Context c, List mycollections, for (int i = 0; i < dircontents.length; i++) { if (skipItems.containsKey(dircontents[i])) { - System.out.println("Skipping import of " + dircontents[i]); + logInfo("Skipping import of " + dircontents[i]); + + //we still need the item in the map for relationship linking + String skippedHandle = skipItems.get(dircontents[i]); + Item skippedItem = (Item) handleService.resolveToObject(c, skippedHandle); + itemFolderMap.put(dircontents[i], skippedItem); + } else { List clist; if (directoryFileCollections) { @@ -262,24 
+298,30 @@ public void addItems(Context c, List mycollections, try { List cols = processCollectionFile(c, path, "collections"); if (cols == null) { - System.out - .println("No collections specified for item " + dircontents[i] + ". Skipping."); + logError("No collections specified for item " + dircontents[i] + ". Skipping."); continue; } clist = cols; } catch (IllegalArgumentException e) { - System.out.println(e.getMessage() + " Skipping."); + logError(e.getMessage() + " Skipping."); continue; } } else { clist = mycollections; } + Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template); + + itemFolderMap.put(dircontents[i], item); + c.uncacheEntity(item); - System.out.println(i + " " + dircontents[i]); + logInfo(i + " " + dircontents[i]); } } + //now that all items are imported, iterate again to link relationships + addRelationships(c, sourceDir); + } finally { if (mapOut != null) { mapOut.flush(); @@ -288,13 +330,315 @@ public void addItems(Context c, List mycollections, } } + /** + * Add relationships from a 'relationships' manifest file. 
+ * + * @param c Context + * @param sourceDir The parent import source directory + * @throws Exception + */ + protected void addRelationships(Context c, String sourceDir) throws Exception { + + for (Map.Entry itemEntry : itemFolderMap.entrySet()) { + + String folderName = itemEntry.getKey(); + String path = sourceDir + File.separatorChar + folderName; + Item item = itemEntry.getValue(); + + //look for a 'relationship' manifest + Map> relationships = processRelationshipFile(path, "relationships"); + if (!relationships.isEmpty()) { + + for (Map.Entry> relEntry : relationships.entrySet()) { + + String relationshipType = relEntry.getKey(); + List identifierList = relEntry.getValue(); + + for (String itemIdentifier : identifierList) { + + if (isTest) { + logInfo("\tAdding relationship (type: " + relationshipType + + ") from " + folderName + " to " + itemIdentifier); + continue; + } + + //find referenced item + Item relationItem = resolveRelatedItem(c, itemIdentifier); + if (null == relationItem) { + throw new Exception("Could not find item for " + itemIdentifier); + } + + addRelationship(c, item, relationItem, relationshipType); + } + + } + + } + + } + + } + + /** + * Add relationship. 
+ * @param c the context + * @param item the item + * @param relationItem the related item + * @param relationshipType the relation type name + * @throws SQLException + * @throws AuthorizeException + */ + protected void addRelationship(Context c, Item item, Item relationItem, String relationshipType) + throws SQLException, AuthorizeException { + // get entity type of entity and item + String itemEntityType = getEntityType(item); + String relatedEntityType = getEntityType(relationItem); + + // find matching relationship type + List relTypes = relationshipTypeService.findByLeftwardOrRightwardTypeName( + c, relationshipType); + RelationshipType foundRelationshipType = RelationshipUtils.matchRelationshipType( + relTypes, relatedEntityType, itemEntityType, relationshipType); + + if (foundRelationshipType == null) { + throw new IllegalArgumentException("No Relationship type found for:\n" + + "Target type: " + relatedEntityType + "\n" + + "Origin referer type: " + itemEntityType + "\n" + + "with typeName: " + relationshipType + ); + } + + boolean left = false; + if (foundRelationshipType.getLeftwardType().equalsIgnoreCase(relationshipType)) { + left = true; + } + + // placeholder items for relation placing + Item leftItem = null; + Item rightItem = null; + if (left) { + leftItem = item; + rightItem = relationItem; + } else { + leftItem = relationItem; + rightItem = item; + } + + // Create the relationship, appending to the end + Relationship persistedRelationship = relationshipService.create( + c, leftItem, rightItem, foundRelationshipType, -1, -1 + ); + relationshipService.update(c, persistedRelationship); + + logInfo("\tAdded relationship (type: " + relationshipType + ") from " + + leftItem.getHandle() + " to " + rightItem.getHandle()); + } + + /** + * Get the item's entity type from meta. 
+ * + * @param item + * @return + */ + protected String getEntityType(Item item) { + return itemService.getMetadata(item, "dspace", "entity", "type", Item.ANY).get(0).getValue(); + } + + /** + * Read the relationship manifest file. + * + * Each line in the file contains a relationship type id and an item + * identifier in the following format: + * + *

+ * {@code relation. } + * + *

+ * The {@code input_item_folder} should refer the folder name of another + * item in this import batch. + * + * @param path The main import folder path. + * @param filename The name of the manifest file to check ('relationships') + * @return Map of found relationships + * @throws Exception + */ + protected Map> processRelationshipFile(String path, String filename) throws Exception { + + File file = new File(path + File.separatorChar + filename); + Map> result = new HashMap<>(); + + if (file.exists()) { + + logInfo("\tProcessing relationships file: " + filename); + + BufferedReader br = null; + try { + br = new BufferedReader(new FileReader(file)); + String line = null; + while ((line = br.readLine()) != null) { + line = line.trim(); + if ("".equals(line)) { + continue; + } + + String relationshipType = null; + String itemIdentifier = null; + + StringTokenizer st = new StringTokenizer(line); + + if (st.hasMoreTokens()) { + relationshipType = st.nextToken(); + if (relationshipType.split("\\.").length > 1) { + relationshipType = relationshipType.split("\\.")[1]; + } + } else { + throw new Exception("Bad mapfile line:\n" + line); + } + + if (st.hasMoreTokens()) { + itemIdentifier = st.nextToken("").trim(); + } else { + throw new Exception("Bad mapfile line:\n" + line); + } + + if (!result.containsKey(relationshipType)) { + result.put(relationshipType, new ArrayList<>()); + } + + result.get(relationshipType).add(itemIdentifier); + + } + + } catch (FileNotFoundException e) { + logWarn("\tNo relationships file found."); + } finally { + if (br != null) { + try { + br.close(); + } catch (IOException e) { + logError("Non-critical problem releasing resources."); + } + } + } + + } + + return result; + } + + /** + * Resolve an item identifier referred to in the relationships manifest file. + * + * The import item map will be checked first to see if the identifier refers to an item folder + * that was just imported. 
Next it will try to find the item by handle or UUID, or by a unique + * meta value. + * + * @param c Context + * @param itemIdentifier The identifier string found in the import manifest (handle, uuid, or import subfolder) + * @return Item if found, or null. + * @throws Exception + */ + protected Item resolveRelatedItem(Context c, String itemIdentifier) throws Exception { + + if (itemIdentifier.contains(":")) { + + if (itemIdentifier.startsWith("folderName:") || itemIdentifier.startsWith("rowName:")) { + //identifier refers to a folder name in this import + int i = itemIdentifier.indexOf(":"); + String folderName = itemIdentifier.substring(i + 1); + if (itemFolderMap.containsKey(folderName)) { + return itemFolderMap.get(folderName); + } + + } else { + + //lookup by meta value + int i = itemIdentifier.indexOf(":"); + String metaKey = itemIdentifier.substring(0, i); + String metaValue = itemIdentifier.substring(i + 1); + return findItemByMetaValue(c, metaKey, metaValue); + + } + + } + + // resolve item by handle or UUID + return resolveItem(c, itemIdentifier); + + } + + /** + * Resolve an item identifier. + * + * @param c Context + * @param itemIdentifier The identifier string found in the import file (handle or UUID) + * @return Item if found, or null. + * @throws SQLException + * @throws IllegalStateException + * @throws Exception + */ + protected Item resolveItem(Context c, String itemIdentifier) + throws IllegalStateException, SQLException { + if (itemIdentifier.indexOf('/') != -1) { + // resolve by handle + return (Item) handleService.resolveToObject(c, itemIdentifier); + } + + // resolve by UUID + return itemService.findByIdOrLegacyId(c, itemIdentifier); + } + + /** + * Lookup an item by a (unique) meta value. + * + * @param c current DSpace session. + * @param metaKey name of the metadata field to match. + * @param metaValue value to be matched. + * @return the matching Item. + * @throws Exception if single item not found. 
+ */ + protected Item findItemByMetaValue(Context c, String metaKey, String metaValue) throws Exception { + + Item item = null; + + String mf[] = metaKey.split("\\."); + if (mf.length < 2) { + throw new Exception("Bad metadata field in reference: '" + metaKey + + "' (expected syntax is schema.element[.qualifier])"); + } + String schema = mf[0]; + String element = mf[1]; + String qualifier = mf.length == 2 ? null : mf[2]; + try { + MetadataField mfo = metadataFieldService.findByElement(c, schema, element, qualifier); + Iterator mdv = metadataValueService.findByFieldAndValue(c, mfo, metaValue); + if (mdv.hasNext()) { + MetadataValue mdvVal = mdv.next(); + UUID uuid = mdvVal.getDSpaceObject().getID(); + if (mdv.hasNext()) { + throw new Exception("Ambiguous reference; multiple matches in db: " + metaKey); + } + item = itemService.find(c, uuid); + } + } catch (SQLException e) { + throw new Exception("Error looking up item by metadata reference: " + metaKey, e); + } + + if (item == null) { + throw new Exception("Item not found by metadata reference: " + metaKey); + } + + return item; + + } + @Override public void replaceItems(Context c, List mycollections, String sourceDir, String mapFile, boolean template) throws Exception { // verify the source directory File d = new java.io.File(sourceDir); - if (d == null || !d.isDirectory()) { + if (!d.isDirectory()) { throw new Exception("Error, cannot open source directory " + sourceDir); } @@ -312,7 +656,7 @@ public void replaceItems(Context c, List mycollections, Item oldItem = null; if (oldHandle.indexOf('/') != -1) { - System.out.println("\tReplacing: " + oldHandle); + logInfo("\tReplacing: " + oldHandle); // add new item, locate old one oldItem = (Item) handleService.resolveToObject(c, oldHandle); @@ -333,10 +677,6 @@ public void replaceItems(Context c, List mycollections, File handleFile = new File(sourceDir + File.separatorChar + newItemName + File.separatorChar + "handle"); PrintWriter handleOut = new PrintWriter(new 
FileWriter(handleFile, true)); - if (handleOut == null) { - throw new Exception("can't open handle file: " + handleFile.getCanonicalPath()); - } - handleOut.println(oldHandle); handleOut.close(); @@ -349,7 +689,7 @@ public void replaceItems(Context c, List mycollections, @Override public void deleteItems(Context c, String mapFile) throws Exception { - System.out.println("Deleting items listed in mapfile: " + mapFile); + logInfo("Deleting items listed in mapfile: " + mapFile); // read in the mapfile Map myhash = readMapFile(mapFile); @@ -362,12 +702,12 @@ public void deleteItems(Context c, String mapFile) throws Exception { if (itemID.indexOf('/') != -1) { String myhandle = itemID; - System.out.println("Deleting item " + myhandle); + logInfo("Deleting item " + myhandle); deleteItem(c, myhandle); } else { // it's an ID Item myitem = itemService.findByIdOrLegacyId(c, itemID); - System.out.println("Deleting item " + itemID); + logInfo("Deleting item " + itemID); deleteItem(c, myitem); c.uncacheEntity(myitem); } @@ -390,8 +730,7 @@ protected Item addItem(Context c, List mycollections, String path, String itemname, PrintWriter mapOut, boolean template) throws Exception { String mapOutputString = null; - System.out.println("Adding item from directory " + itemname); - log.debug("adding item from directory " + itemname); + logDebug("adding item from directory " + itemname); // create workspace item Item myitem = null; @@ -435,10 +774,14 @@ protected Item addItem(Context c, List mycollections, String path, // put item in system if (!isTest) { try { + // Add provenance info + String provenance = installItemService.getSubmittedByProvenanceMessage(c, wi.getItem()); + itemService.addMetadata(c, wi.getItem(), MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provenance); installItemService.installItem(c, wi, myhandle); } catch (Exception e) { workspaceItemService.deleteAll(c, wi); - log.error("Exception after install item, try to revert...", e); + 
logError("Exception after install item, try to revert...", e); throw e; } @@ -450,7 +793,7 @@ protected Item addItem(Context c, List mycollections, String path, // set permissions if specified in contents file if (options.size() > 0) { - System.out.println("Processing options"); + logInfo("Processing options"); processOptions(c, myitem, options); } } @@ -501,7 +844,7 @@ protected void deleteItem(Context c, String myhandle) throws Exception { Item myitem = (Item) handleService.resolveToObject(c, myhandle); if (myitem == null) { - System.out.println("Error - cannot locate item - already deleted?"); + logError("Error - cannot locate item - already deleted?"); } else { deleteItem(c, myitem); c.uncacheEntity(myitem); @@ -554,7 +897,7 @@ protected Map readMapFile(String filename) throws Exception { // Load all metadata schemas into the item. protected void loadMetadata(Context c, Item myitem, String path) throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + SAXException, TransformerException, AuthorizeException, XPathExpressionException { // Load the dublin core metadata loadDublinCore(c, myitem, path + "dublin_core.xml"); @@ -568,14 +911,15 @@ protected void loadMetadata(Context c, Item myitem, String path) protected void loadDublinCore(Context c, Item myitem, String filename) throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + SAXException, TransformerException, AuthorizeException, XPathExpressionException { Document document = loadXML(filename); // Get the schema, for backward compatibility we will default to the // dublin core schema if the schema name is not available in the import // file String schema; - NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core"); + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, 
XPathConstants.NODESET); Node schemaAttr = metadata.item(0).getAttributes().getNamedItem( "schema"); if (schemaAttr == null) { @@ -585,11 +929,10 @@ protected void loadDublinCore(Context c, Item myitem, String filename) } // Get the nodes corresponding to formats - NodeList dcNodes = XPathAPI.selectNodeList(document, - "/dublin_core/dcvalue"); + NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET); if (!isQuiet) { - System.out.println("\tLoading dublin core from " + filename); + logInfo("\tLoading dublin core from " + filename); } // Add each one as a new format to the registry @@ -613,13 +956,14 @@ protected void addDCValue(Context c, Item i, String schema, Node n) String qualifier = getAttributeValue(n, "qualifier"); //NodeValue(); // //getElementData(n, // "qualifier"); - String language = getAttributeValue(n, "language"); - if (language != null) { - language = language.trim(); + + String language = null; + if (StringUtils.isNotBlank(getAttributeValue(n, "language"))) { + language = getAttributeValue(n, "language").trim(); } if (!isQuiet) { - System.out.println("\tSchema: " + schema + " Element: " + element + " Qualifier: " + qualifier + logInfo("\tSchema: " + schema + " Element: " + element + " Qualifier: " + qualifier + " Value: " + value); } @@ -628,20 +972,28 @@ protected void addDCValue(Context c, Item i, String schema, Node n) } // only add metadata if it is no test and there is an actual value if (!isTest && !value.equals("")) { - itemService.addMetadata(c, i, schema, element, qualifier, language, value); + if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName())) { + Item relationItem = resolveItem(c, value); + if (relationItem == null) { + throw new IllegalArgumentException("No item found with id=" + value); + } + addRelationship(c, i, relationItem, element); + } else { + itemService.addMetadata(c, i, schema, element, qualifier, language, value); + } } else { // If we're just test 
the import, let's check that the actual metadata field exists. MetadataSchema foundSchema = metadataSchemaService.find(c, schema); if (foundSchema == null) { - System.out.println("ERROR: schema '" + schema + "' was not found in the registry."); + logError("ERROR: schema '" + schema + "' was not found in the registry."); return; } MetadataField foundField = metadataFieldService.findByElement(c, foundSchema, element, qualifier); if (foundField == null) { - System.out.println( + logError( "ERROR: Metadata field: '" + schema + "." + element + "." + qualifier + "' was not found in the " + "registry."); return; @@ -668,7 +1020,7 @@ protected List processCollectionFile(Context c, String path, String File file = new File(path + File.separatorChar + filename); ArrayList collections = new ArrayList<>(); List result = null; - System.out.println("Processing collections file: " + filename); + logInfo("Processing collections file: " + filename); if (file.exists()) { BufferedReader br = null; @@ -695,13 +1047,13 @@ protected List processCollectionFile(Context c, String path, String result = collections; } catch (FileNotFoundException e) { - System.out.println("No collections file found."); + logWarn("No collections file found."); } finally { if (br != null) { try { br.close(); } catch (IOException e) { - System.out.println("Non-critical problem releasing resources."); + logError("Non-critical problem releasing resources."); } } } @@ -723,7 +1075,7 @@ protected String processHandleFile(Context c, Item i, String path, String filena File file = new File(path + File.separatorChar + filename); String result = null; - System.out.println("Processing handle file: " + filename); + logInfo("Processing handle file: " + filename); if (file.exists()) { BufferedReader is = null; try { @@ -732,14 +1084,14 @@ protected String processHandleFile(Context c, Item i, String path, String filena // result gets contents of file, or null result = is.readLine(); - System.out.println("read handle: '" + 
result + "'"); + logInfo("read handle: '" + result + "'"); } catch (FileNotFoundException e) { // probably no handle file, just return null - System.out.println("It appears there is no handle file -- generating one"); + logWarn("It appears there is no handle file -- generating one"); } catch (IOException e) { // probably no handle file, just return null - System.out.println("It appears there is no handle file -- generating one"); + logWarn("It appears there is no handle file -- generating one"); } finally { if (is != null) { try { @@ -751,7 +1103,7 @@ protected String processHandleFile(Context c, Item i, String path, String filena } } else { // probably no handle file, just return null - System.out.println("It appears there is no handle file -- generating one"); + logWarn("It appears there is no handle file -- generating one"); } return result; @@ -778,7 +1130,7 @@ protected List processContentsFile(Context c, Item i, String path, String line = ""; List options = new ArrayList<>(); - System.out.println("\tProcessing contents file: " + contentsFile); + logInfo("\tProcessing contents file: " + contentsFile); if (contentsFile.exists()) { BufferedReader is = null; @@ -825,9 +1177,9 @@ protected List processContentsFile(Context c, Item i, String path, } } // while if (iAssetstore == -1 || sFilePath == null) { - System.out.println("\tERROR: invalid contents file line"); - System.out.println("\t\tSkipping line: " - + sRegistrationLine); + logError("\tERROR: invalid contents file line"); + logInfo("\t\tSkipping line: " + + sRegistrationLine); continue; } @@ -850,10 +1202,10 @@ protected List processContentsFile(Context c, Item i, String path, } registerBitstream(c, i, iAssetstore, sFilePath, sBundle, sDescription); - System.out.println("\tRegistering Bitstream: " + sFilePath - + "\tAssetstore: " + iAssetstore - + "\tBundle: " + sBundle - + "\tDescription: " + sDescription); + logInfo("\tRegistering Bitstream: " + sFilePath + + "\tAssetstore: " + iAssetstore + + "\tBundle: 
" + sBundle + + "\tDescription: " + sDescription); continue; // process next line in contents file } @@ -862,7 +1214,7 @@ protected List processContentsFile(Context c, Item i, String path, if (bitstreamEndIndex == -1) { // no extra info processContentFileEntry(c, i, path, line, null, false); - System.out.println("\tBitstream: " + line); + logInfo("\tBitstream: " + line); } else { String bitstreamName = line.substring(0, bitstreamEndIndex); @@ -870,6 +1222,59 @@ protected List processContentsFile(Context c, Item i, String path, boolean bundleExists = false; boolean permissionsExist = false; boolean descriptionExists = false; + boolean labelExists = false; + boolean heightExists = false; + boolean widthExists = false; + boolean tocExists = false; + + // look for label + String labelMarker = "\tiiif-label"; + int lMarkerIndex = line.indexOf(labelMarker); + int lEndIndex = 0; + if (lMarkerIndex > 0) { + lEndIndex = line.indexOf("\t", lMarkerIndex + 1); + if (lEndIndex == -1) { + lEndIndex = line.length(); + } + labelExists = true; + } + + // look for height + String heightMarker = "\tiiif-height"; + int hMarkerIndex = line.indexOf(heightMarker); + int hEndIndex = 0; + if (hMarkerIndex > 0) { + hEndIndex = line.indexOf("\t", hMarkerIndex + 1); + if (hEndIndex == -1) { + hEndIndex = line.length(); + } + heightExists = true; + } + + // look for width + String widthMarker = "\tiiif-width"; + int wMarkerIndex = line.indexOf(widthMarker); + int wEndIndex = 0; + if (wMarkerIndex > 0) { + wEndIndex = line.indexOf("\t", wMarkerIndex + 1); + if (wEndIndex == -1) { + wEndIndex = line.length(); + } + widthExists = true; + } + + // look for toc + String tocMarker = "\tiiif-toc"; + int tMarkerIndex = line.indexOf(tocMarker); + int tEndIndex = 0; + if (tMarkerIndex > 0) { + tEndIndex = line.indexOf("\t", tMarkerIndex + 1); + if (tEndIndex == -1) { + tEndIndex = line.length(); + } + tocExists = true; + } + // look for a bundle name String bundleMarker = "\tbundle:"; @@ -918,18 
+1323,20 @@ protected List processContentsFile(Context c, Item i, String path, if (bundleExists) { String bundleName = line.substring(bMarkerIndex - + bundleMarker.length(), bEndIndex).trim(); + + bundleMarker.length(), bEndIndex).trim(); processContentFileEntry(c, i, path, bitstreamName, bundleName, primary); - System.out.println("\tBitstream: " + bitstreamName + - "\tBundle: " + bundleName + - primaryStr); + logInfo("\tBitstream: " + bitstreamName + + "\tBundle: " + bundleName + + primaryStr); } else { processContentFileEntry(c, i, path, bitstreamName, null, primary); - System.out.println("\tBitstream: " + bitstreamName + primaryStr); + logInfo("\tBitstream: " + bitstreamName + primaryStr); } - if (permissionsExist || descriptionExists) { + if (permissionsExist || descriptionExists || labelExists || heightExists + || widthExists || tocExists) { + logInfo("Gathering options."); String extraInfo = bitstreamName; if (permissionsExist) { @@ -942,6 +1349,26 @@ protected List processContentsFile(Context c, Item i, String path, + line.substring(dMarkerIndex, dEndIndex); } + if (labelExists) { + extraInfo = extraInfo + + line.substring(lMarkerIndex, lEndIndex); + } + + if (heightExists) { + extraInfo = extraInfo + + line.substring(hMarkerIndex, hEndIndex); + } + + if (widthExists) { + extraInfo = extraInfo + + line.substring(wMarkerIndex, wEndIndex); + } + + if (tocExists) { + extraInfo = extraInfo + + line.substring(tMarkerIndex, tEndIndex); + } + options.add(extraInfo); } } @@ -956,12 +1383,12 @@ protected List processContentsFile(Context c, Item i, String path, String[] dirListing = dir.list(); for (String fileName : dirListing) { if (!"dublin_core.xml".equals(fileName) && !fileName.equals("handle") && !metadataFileFilter - .accept(dir, fileName)) { + .accept(dir, fileName) && !"collections".equals(fileName) && !"relationships".equals(fileName)) { throw new FileNotFoundException("No contents file found"); } } - System.out.println("No contents file found - but only 
metadata files found. Assuming metadata only."); + logInfo("No contents file found - but only metadata files found. Assuming metadata only."); } return options; @@ -983,6 +1410,10 @@ protected List processContentsFile(Context c, Item i, String path, protected void processContentFileEntry(Context c, Item i, String path, String fileName, String bundleName, boolean primary) throws SQLException, IOException, AuthorizeException { + if (isExcludeContent) { + return; + } + String fullpath = path + File.separatorChar + fileName; // get an input stream @@ -1123,11 +1554,16 @@ protected void registerBitstream(Context c, Item i, int assetstore, */ protected void processOptions(Context c, Item myItem, List options) throws SQLException, AuthorizeException { + logInfo("Processing options."); for (String line : options) { - System.out.println("\tprocessing " + line); + logInfo("\tprocessing " + line); boolean permissionsExist = false; boolean descriptionExists = false; + boolean labelExists = false; + boolean heightExists = false; + boolean widthExists = false; + boolean tocExists = false; String permissionsMarker = "\tpermissions:"; int pMarkerIndex = line.indexOf(permissionsMarker); @@ -1151,6 +1587,56 @@ protected void processOptions(Context c, Item myItem, List options) descriptionExists = true; } + + // look for label + String labelMarker = "\tiiif-label:"; + int lMarkerIndex = line.indexOf(labelMarker); + int lEndIndex = 0; + if (lMarkerIndex > 0) { + lEndIndex = line.indexOf("\t", lMarkerIndex + 1); + if (lEndIndex == -1) { + lEndIndex = line.length(); + } + labelExists = true; + } + + // look for height + String heightMarker = "\tiiif-height:"; + int hMarkerIndex = line.indexOf(heightMarker); + int hEndIndex = 0; + if (hMarkerIndex > 0) { + hEndIndex = line.indexOf("\t", hMarkerIndex + 1); + if (hEndIndex == -1) { + hEndIndex = line.length(); + } + heightExists = true; + } + + // look for width + String widthMarker = "\tiiif-width:"; + int wMarkerIndex = 
line.indexOf(widthMarker); + int wEndIndex = 0; + if (wMarkerIndex > 0) { + wEndIndex = line.indexOf("\t", wMarkerIndex + 1); + if (wEndIndex == -1) { + wEndIndex = line.length(); + } + widthExists = true; + } + + // look for toc + String tocMarker = "\tiiif-toc:"; + int tMarkerIndex = line.indexOf(tocMarker); + int tEndIndex = 0; + if (tMarkerIndex > 0) { + tEndIndex = line.indexOf("\t", tMarkerIndex + 1); + if (tEndIndex == -1) { + tEndIndex = line.length(); + } + tocExists = true; + } + + int bsEndIndex = line.indexOf("\t"); String bitstreamName = line.substring(0, bsEndIndex); @@ -1159,7 +1645,7 @@ protected void processOptions(Context c, Item myItem, List options) Group myGroup = null; if (permissionsExist) { String thisPermission = line.substring(pMarkerIndex - + permissionsMarker.length(), pEndIndex); + + permissionsMarker.length(), pEndIndex); // get permission type ("read" or "write") int pTypeIndex = thisPermission.indexOf('-'); @@ -1176,7 +1662,7 @@ protected void processOptions(Context c, Item myItem, List options) } groupName = thisPermission.substring(groupIndex + 1, - groupEndIndex); + groupEndIndex); if (thisPermission.toLowerCase().charAt(pTypeIndex + 1) == 'r') { actionID = Constants.READ; @@ -1187,8 +1673,8 @@ protected void processOptions(Context c, Item myItem, List options) try { myGroup = groupService.findByName(c, groupName); } catch (SQLException sqle) { - System.out.println("SQL Exception finding group name: " - + groupName); + logError("SQL Exception finding group name: " + + groupName); // do nothing, will check for null group later } } @@ -1196,46 +1682,114 @@ protected void processOptions(Context c, Item myItem, List options) String thisDescription = ""; if (descriptionExists) { thisDescription = line.substring( - dMarkerIndex + descriptionMarker.length(), dEndIndex) + dMarkerIndex + descriptionMarker.length(), dEndIndex) .trim(); } + String thisLabel = ""; + if (labelExists) { + thisLabel = line.substring( + lMarkerIndex + 
labelMarker.length(), lEndIndex) + .trim(); + } + + String thisHeight = ""; + if (heightExists) { + thisHeight = line.substring( + hMarkerIndex + heightMarker.length(), hEndIndex) + .trim(); + } + + String thisWidth = ""; + if (widthExists) { + thisWidth = line.substring( + wMarkerIndex + widthMarker.length(), wEndIndex) + .trim(); + } + + String thisToc = ""; + if (tocExists) { + thisToc = line.substring( + tMarkerIndex + tocMarker.length(), tEndIndex) + .trim(); + } + + if (isTest) { + continue; + } + Bitstream bs = null; - boolean notfound = true; - if (!isTest) { - // find bitstream - List bitstreams = itemService.getNonInternalBitstreams(c, myItem); - for (int j = 0; j < bitstreams.size() && notfound; j++) { - if (bitstreams.get(j).getName().equals(bitstreamName)) { - bs = bitstreams.get(j); - notfound = false; - } + boolean updateRequired = false; + + // find bitstream + List bitstreams = itemService.getNonInternalBitstreams(c, myItem); + for (Bitstream bitstream : bitstreams) { + if (bitstream.getName().equals(bitstreamName)) { + bs = bitstream; + break; } } - if (notfound && !isTest) { + if (null == bs) { // this should never happen - System.out.println("\tdefault permissions set for " - + bitstreamName); - } else if (!isTest) { + logInfo("\tdefault permissions set for " + bitstreamName); + } else { if (permissionsExist) { if (myGroup == null) { - System.out.println("\t" + groupName - + " not found, permissions set to default"); + logInfo("\t" + groupName + + " not found, permissions set to default"); } else if (actionID == -1) { - System.out - .println("\tinvalid permissions flag, permissions set to default"); + logInfo("\tinvalid permissions flag, permissions set to default"); } else { - System.out.println("\tSetting special permissions for " - + bitstreamName); + logInfo("\tSetting special permissions for " + + bitstreamName); setPermission(c, myGroup, actionID, bs); } } if (descriptionExists) { - System.out.println("\tSetting description for " - + 
bitstreamName); + logInfo("\tSetting description for " + + bitstreamName); bs.setDescription(c, thisDescription); + updateRequired = true; + } + + if (labelExists) { + MetadataField metadataField = metadataFieldService + .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_LABEL_ELEMENT, null); + logInfo("\tSetting label to " + thisLabel + " in element " + + metadataField.getElement() + " on " + bitstreamName); + bitstreamService.addMetadata(c, bs, metadataField, null, thisLabel); + updateRequired = true; + } + + if (heightExists) { + MetadataField metadataField = metadataFieldService + .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_IMAGE_ELEMENT, + METADATA_IIIF_HEIGHT_QUALIFIER); + logInfo("\tSetting height to " + thisHeight + " in element " + + metadataField.getElement() + " on " + bitstreamName); + bitstreamService.addMetadata(c, bs, metadataField, null, thisHeight); + updateRequired = true; + } + if (widthExists) { + MetadataField metadataField = metadataFieldService + .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_IMAGE_ELEMENT, + METADATA_IIIF_WIDTH_QUALIFIER); + logInfo("\tSetting width to " + thisWidth + " in element " + + metadataField.getElement() + " on " + bitstreamName); + bitstreamService.addMetadata(c, bs, metadataField, null, thisWidth); + updateRequired = true; + } + if (tocExists) { + MetadataField metadataField = metadataFieldService + .findByElement(c, METADATA_IIIF_SCHEMA, METADATA_IIIF_TOC_ELEMENT, null); + logInfo("\tSetting toc to " + thisToc + " in element " + + metadataField.getElement() + " on " + bitstreamName); + bitstreamService.addMetadata(c, bs, metadataField, null, thisToc); + updateRequired = true; + } + if (updateRequired) { bitstreamService.update(c, bs); } } @@ -1269,9 +1823,9 @@ protected void setPermission(Context c, Group g, int actionID, Bitstream bs) resourcePolicyService.update(c, rp); } else { if (actionID == Constants.READ) { - System.out.println("\t\tpermissions: READ for " + g.getName()); + 
logInfo("\t\tpermissions: READ for " + g.getName()); } else if (actionID == Constants.WRITE) { - System.out.println("\t\tpermissions: WRITE for " + g.getName()); + logInfo("\t\tpermissions: WRITE for " + g.getName()); } } @@ -1352,7 +1906,7 @@ protected boolean deleteDirectory(File path) { deleteDirectory(files[i]); } else { if (!files[i].delete()) { - log.error("Unable to delete file: " + files[i].getName()); + logError("Unable to delete file: " + files[i].getName()); } } } @@ -1372,7 +1926,7 @@ public String unzip(File zipfile, String destDir) throws IOException { // 2 // does the zip file exist and can we write to the temp directory if (!zipfile.canRead()) { - log.error("Zip file '" + zipfile.getAbsolutePath() + "' does not exist, or is not readable."); + logError("Zip file '" + zipfile.getAbsolutePath() + "' does not exist, or is not readable."); } String destinationDir = destDir; @@ -1382,13 +1936,13 @@ public String unzip(File zipfile, String destDir) throws IOException { File tempdir = new File(destinationDir); if (!tempdir.isDirectory()) { - log.error("'" + configurationService.getProperty("org.dspace.app.itemexport.work.dir") + - "' as defined by the key 'org.dspace.app.itemexport.work.dir' in dspace.cfg " + + logError("'" + configurationService.getProperty("org.dspace.app.batchitemimport.work.dir") + + "' as defined by the key 'org.dspace.app.batchitemimport.work.dir' in dspace.cfg " + "is not a valid directory"); } if (!tempdir.exists() && !tempdir.mkdirs()) { - log.error("Unable to create temporary directory: " + tempdir.getAbsolutePath()); + logError("Unable to create temporary directory: " + tempdir.getAbsolutePath()); } String sourcedir = destinationDir + System.getProperty("file.separator") + zipfile.getName(); String zipDir = destinationDir + System.getProperty("file.separator") + zipfile.getName() + System @@ -1400,71 +1954,71 @@ public String unzip(File zipfile, String destDir) throws IOException { ZipFile zf = new ZipFile(zipfile); ZipEntry 
entry; Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - entry = entries.nextElement(); - if (entry.isDirectory()) { - if (!new File(zipDir + entry.getName()).mkdirs()) { - log.error("Unable to create contents directory: " + zipDir + entry.getName()); - } - } else { - String entryName = entry.getName(); - File outFile = new File(zipDir + entryName); - // Verify that this file will be extracted into our zipDir (and not somewhere else!) - if (!outFile.toPath().normalize().startsWith(zipDir)) { - throw new IOException("Bad zip entry: '" + entryName - + "' in file '" + zipfile.getAbsolutePath() + "'!" - + " Cannot process this file."); + try { + while (entries.hasMoreElements()) { + entry = entries.nextElement(); + if (entry.isDirectory()) { + if (!new File(zipDir + entry.getName()).mkdirs()) { + logError("Unable to create contents directory: " + zipDir + entry.getName()); + } } else { - System.out.println("Extracting file: " + entryName); - log.info("Extracting file: " + entryName); + String entryName = entry.getName(); + File outFile = new File(zipDir + entryName); + // Verify that this file will be extracted into our zipDir (and not somewhere else!) + if (!outFile.toPath().normalize().startsWith(zipDir)) { + throw new IOException("Bad zip entry: '" + entryName + + "' in file '" + zipfile.getAbsolutePath() + "'!" + + " Cannot process this file."); + } else { + logInfo("Extracting file: " + entryName); - int index = entryName.lastIndexOf('/'); - if (index == -1) { - // Was it created on Windows instead? - index = entryName.lastIndexOf('\\'); - } - if (index > 0) { - File dir = new File(zipDir + entryName.substring(0, index)); - if (!dir.exists() && !dir.mkdirs()) { - log.error("Unable to create directory: " + dir.getAbsolutePath()); + int index = entryName.lastIndexOf('/'); + if (index == -1) { + // Was it created on Windows instead? 
+ index = entryName.lastIndexOf('\\'); } + if (index > 0) { + File dir = new File(zipDir + entryName.substring(0, index)); + if (!dir.exists() && !dir.mkdirs()) { + logError("Unable to create directory: " + dir.getAbsolutePath()); + } - //Entries could have too many directories, and we need to adjust the sourcedir - // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|... - // SimpleArchiveFormat / item2 / contents|dublin_core|... - // or - // file2.zip (item1 / contents|dublin_core|... - // item2 / contents|dublin_core|... - - //regex supports either windows or *nix file paths - String[] entryChunks = entryName.split("/|\\\\"); - if (entryChunks.length > 2) { - if (StringUtils.equals(sourceDirForZip, sourcedir)) { - sourceDirForZip = sourcedir + "/" + entryChunks[0]; + //Entries could have too many directories, and we need to adjust the sourcedir + // file1.zip (SimpleArchiveFormat / item1 / contents|dublin_core|... + // SimpleArchiveFormat / item2 / contents|dublin_core|... + // or + // file2.zip (item1 / contents|dublin_core|... + // item2 / contents|dublin_core|... 
+ + //regex supports either windows or *nix file paths + String[] entryChunks = entryName.split("/|\\\\"); + if (entryChunks.length > 2) { + if (StringUtils.equals(sourceDirForZip, sourcedir)) { + sourceDirForZip = sourcedir + "/" + entryChunks[0]; + } } } + byte[] buffer = new byte[1024]; + int len; + InputStream in = zf.getInputStream(entry); + BufferedOutputStream out = new BufferedOutputStream( + new FileOutputStream(outFile)); + while ((len = in.read(buffer)) >= 0) { + out.write(buffer, 0, len); + } + in.close(); + out.close(); } - byte[] buffer = new byte[1024]; - int len; - InputStream in = zf.getInputStream(entry); - BufferedOutputStream out = new BufferedOutputStream( - new FileOutputStream(outFile)); - while ((len = in.read(buffer)) >= 0) { - out.write(buffer, 0, len); - } - in.close(); - out.close(); } } + } finally { + //Close zip file + zf.close(); } - //Close zip file - zf.close(); - if (!StringUtils.equals(sourceDirForZip, sourcedir)) { sourcedir = sourceDirForZip; - System.out.println("Set sourceDir using path inside of Zip: " + sourcedir); - log.info("Set sourceDir using path inside of Zip: " + sourcedir); + logInfo("Set sourceDir using path inside of Zip: " + sourcedir); } return sourcedir; @@ -1514,20 +2068,15 @@ public void processUIImport(String filepath, Collection owningCollection, String final String theFilePath = filepath; final String theInputType = inputType; final String theResumeDir = resumeDir; - final boolean useTemplateItem = template; Thread go = new Thread() { @Override public void run() { - Context context = null; - + Context context = new Context(); String importDir = null; EPerson eperson = null; try { - - // create a new dspace context - context = new Context(); eperson = ePersonService.find(context, oldEPerson.getID()); context.setCurrentUser(eperson); context.turnOffAuthorisationSystem(); @@ -1538,7 +2087,8 @@ public void run() { if (theOtherCollections != null) { for (String colID : theOtherCollections) { UUID colId = 
UUID.fromString(colID); - if (!theOwningCollection.getID().equals(colId)) { + if (theOwningCollection != null + && !theOwningCollection.getID().equals(colId)) { Collection col = collectionService.find(context, colId); if (col != null) { collectionList.add(col); @@ -1557,7 +2107,7 @@ public void run() { if (!importDirFile.exists()) { boolean success = importDirFile.mkdirs(); if (!success) { - log.info("Cannot create batch import directory!"); + logInfo("Cannot create batch import directory!"); throw new Exception("Cannot create batch import directory!"); } } @@ -1689,14 +2239,14 @@ public void emailSuccessMessage(Context context, EPerson eperson, email.send(); } catch (Exception e) { - log.warn(LogManager.getHeader(context, "emailSuccessMessage", "cannot notify user of import"), e); + logError(LogHelper.getHeader(context, "emailSuccessMessage", "cannot notify user of import"), e); } } @Override public void emailErrorMessage(EPerson eperson, String error) throws MessagingException { - log.warn("An error occurred during item import, the user will be notified. " + error); + logError("An error occurred during item import, the user will be notified. 
" + error); try { Locale supportedLocale = I18nUtil.getEPersonLocale(eperson); Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "bte_batch_import_error")); @@ -1706,7 +2256,7 @@ public void emailErrorMessage(EPerson eperson, String error) email.send(); } catch (Exception e) { - log.warn("error during item import error notification", e); + logError("error during item import error notification", e); } } @@ -1784,18 +2334,17 @@ public File getTempWorkDirFile() + tempDirFile.getAbsolutePath() + " could not be created."); } else { - log.debug("Created directory " + tempDirFile.getAbsolutePath()); + logDebug("Created directory " + tempDirFile.getAbsolutePath()); } } else { - log.debug("Work directory exists: " + tempDirFile.getAbsolutePath()); + logDebug("Work directory exists: " + tempDirFile.getAbsolutePath()); } return tempDirFile; } @Override public void cleanupZipTemp() { - System.out.println("Deleting temporary zip directory: " + tempWorkDir); - log.debug("Deleting temporary zip directory: " + tempWorkDir); + logDebug("Deleting temporary zip directory: " + tempWorkDir); deleteDirectory(new File(tempWorkDir)); } @@ -1804,6 +2353,11 @@ public void setTest(boolean isTest) { this.isTest = isTest; } + @Override + public void setExcludeContent(boolean isExcludeContent) { + this.isExcludeContent = isExcludeContent; + } + @Override public void setResume(boolean isResume) { this.isResume = isResume; @@ -1823,4 +2377,82 @@ public void setUseWorkflowSendEmail(boolean useWorkflowSendEmail) { public void setQuiet(boolean isQuiet) { this.isQuiet = isQuiet; } + + @Override + public void setHandler(DSpaceRunnableHandler handler) { + this.handler = handler; + } + + private void logInfo(String message) { + logInfo(message, null); + } + + private void logInfo(String message, Exception e) { + if (handler != null) { + handler.logInfo(message); + return; + } + + if (e != null) { + log.info(message, e); + } else { + log.info(message); + } + } + + private void 
logDebug(String message) { + logDebug(message, null); + } + + private void logDebug(String message, Exception e) { + if (handler != null) { + handler.logDebug(message); + return; + } + + if (e != null) { + log.debug(message, e); + } else { + log.debug(message); + } + } + + private void logWarn(String message) { + logWarn(message, null); + } + + private void logWarn(String message, Exception e) { + if (handler != null) { + handler.logWarning(message); + return; + } + + if (e != null) { + log.warn(message, e); + } else { + log.warn(message); + } + } + + private void logError(String message) { + logError(message, null); + } + + private void logError(String message, Exception e) { + if (handler != null) { + if (e != null) { + handler.logError(message, e); + } else { + handler.logError(message); + } + return; + } + + if (e != null) { + log.error(message, e); + } else { + log.error(message); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java b/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java index af333764b562..e99ece31b9bb 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/service/ItemImportService.java @@ -16,6 +16,7 @@ import org.dspace.content.Collection; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * Import items into DSpace. The conventional use is upload files by copying @@ -105,7 +106,7 @@ public void processUIImport(String url, Collection owningCollection, String[] co String inputType, Context context, boolean template) throws Exception; /** - * Since the BTE batch import is done in a new thread we are unable to communicate + * If a batch import is done in a new thread we are unable to communicate * with calling method about success or failure. 
We accomplish this * communication with email instead. Send a success email once the batch * import is complete @@ -119,7 +120,7 @@ public void emailSuccessMessage(Context context, EPerson eperson, String fileName) throws MessagingException; /** - * Since the BTE batch import is done in a new thread we are unable to communicate + * If a batch import is done in a new thread we are unable to communicate * with calling method about success or failure. We accomplis this * communication with email instead. Send an error email if the batch * import fails @@ -210,6 +211,13 @@ public void replaceItems(Context c, List mycollections, String sourc */ public void setTest(boolean isTest); + /** + * Set exclude-content flag. + * + * @param isExcludeContent true or false + */ + public void setExcludeContent(boolean isExcludeContent); + /** * Set resume flag * @@ -235,4 +243,10 @@ public void replaceItems(Context c, List mycollections, String sourc * @param isQuiet true or false */ public void setQuiet(boolean isQuiet); + + /** + * Set the DSpace Runnable Handler + * @param handler + */ + public void setHandler(DSpaceRunnableHandler handler); } diff --git a/dspace-api/src/main/java/org/dspace/app/itemmarking/ItemMarkingAvailabilityBitstreamStrategy.java b/dspace-api/src/main/java/org/dspace/app/itemmarking/ItemMarkingAvailabilityBitstreamStrategy.java index cd08ad032c49..31166add72ec 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemmarking/ItemMarkingAvailabilityBitstreamStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/itemmarking/ItemMarkingAvailabilityBitstreamStrategy.java @@ -11,6 +11,8 @@ import java.sql.SQLException; import java.util.List; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.util.Util; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; @@ -34,8 +36,9 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr @Autowired(required = true) 
protected ItemService itemService; - public ItemMarkingAvailabilityBitstreamStrategy() { + private static final Logger LOG = LogManager.getLogger(); + public ItemMarkingAvailabilityBitstreamStrategy() { } @Override @@ -43,14 +46,14 @@ public ItemMarkingInfo getItemMarkingInfo(Context context, Item item) throws SQLException { List bundles = itemService.getBundles(item, "ORIGINAL"); - if (bundles.size() == 0) { + if (bundles.isEmpty()) { ItemMarkingInfo markInfo = new ItemMarkingInfo(); markInfo.setImageName(nonAvailableImageName); return markInfo; } else { Bundle originalBundle = bundles.iterator().next(); - if (originalBundle.getBitstreams().size() == 0) { + if (originalBundle.getBitstreams().isEmpty()) { ItemMarkingInfo markInfo = new ItemMarkingInfo(); markInfo.setImageName(nonAvailableImageName); @@ -72,8 +75,7 @@ public ItemMarkingInfo getItemMarkingInfo(Context context, Item item) try { bsLink = bsLink + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING); } catch (UnsupportedEncodingException e) { - - e.printStackTrace(); + LOG.warn("DSpace uses an unsupported encoding", e); } signInfo.setLink(bsLink); diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java index e9693fb3d1ab..644745304a23 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/AddBitstreamsAction.java @@ -77,7 +77,7 @@ public void execute(Context context, ItemArchive itarch, boolean isTest, ItemUpdate.pr("Contents bitstream count: " + contents.size()); String[] files = dir.list(ItemUpdate.fileFilter); - List fileList = new ArrayList(); + List fileList = new ArrayList<>(); for (String filename : files) { fileList.add(filename); ItemUpdate.pr("file: " + filename); @@ -134,9 +134,6 @@ protected String addBitstream(Context context, ItemArchive itarch, Item item, Fi 
ItemUpdate.pr("contents entry for bitstream: " + ce.toString()); File f = new File(dir, ce.filename); - // get an input stream - BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f)); - Bitstream bs = null; String newBundleName = ce.bundlename; @@ -173,7 +170,9 @@ protected String addBitstream(Context context, ItemArchive itarch, Item item, Fi targetBundle = bundles.iterator().next(); } - bs = bitstreamService.create(context, targetBundle, bis); + try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));) { + bs = bitstreamService.create(context, targetBundle, bis); + } bs.setName(context, ce.filename); // Identify the format diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/ContentsEntry.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/ContentsEntry.java index e192b92b89ea..7bbe4a19e9ea 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/ContentsEntry.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/ContentsEntry.java @@ -105,6 +105,7 @@ public static ContentsEntry parse(String line) return new ContentsEntry(arp[0], arp[1], actionId, groupName, arp[3]); } + @Override public String toString() { StringBuilder sb = new StringBuilder(filename); if (bundlename != null) { diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/DtoMetadata.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/DtoMetadata.java index 6e4a4a88d6b4..e67b2221e47e 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/DtoMetadata.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/DtoMetadata.java @@ -120,6 +120,7 @@ public boolean matches(String compoundForm, boolean wildcard) { return true; } + @Override public String toString() { String s = "\tSchema: " + schema + " Element: " + element; if (qualifier != null) { diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemArchive.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemArchive.java index 
2270d736a8e3..26de45caf77e 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemArchive.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemArchive.java @@ -17,6 +17,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; @@ -55,7 +56,7 @@ public class ItemArchive { protected Transformer transformer = null; protected List dtomList = null; - protected List undoDtomList = new ArrayList(); + protected List undoDtomList = new ArrayList<>(); protected List undoAddContents = new ArrayList<>(); // for undo of add @@ -325,7 +326,7 @@ public void writeUndo(File undoDir) PrintWriter pw = null; try { File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE); - pw = new PrintWriter(new BufferedWriter(new FileWriter(f))); + pw = new PrintWriter(new BufferedWriter(new FileWriter(f, StandardCharsets.UTF_8))); for (UUID i : undoAddContents) { pw.println(i); } diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java index b6aa875f29b0..a3fe0b2321f7 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/ItemUpdate.java @@ -39,29 +39,34 @@ import org.dspace.handle.service.HandleService; /** - * Provides some batch editing capabilities for items in DSpace: - * Metadata fields - Add, Delete - * Bitstreams - Add, Delete + * Provides some batch editing capabilities for items in DSpace. + *

    + *
  • Metadata fields - Add, Delete
  • + *
  • Bitstreams - Add, Delete
  • + *
* - * The design has been for compatibility with ItemImporter + *

+ * The design has been for compatibility with + * {@link org.dspace.app.itemimport.service.ItemImportService} * in the use of the DSpace archive format which is used to * specify changes on a per item basis. The directory names * to correspond to each item are arbitrary and will only be * used for logging purposes. The reference to the item is - * from a required dc.identifier with the item handle to be - * included in the dublin_core.xml (or similar metadata) file. + * from a required {@code dc.identifier} with the item handle to be + * included in the {@code dublin_core.xml} (or similar metadata) file. * - * Any combination of these actions is permitted in a single run of this class + *

+ * Any combination of these actions is permitted in a single run of this class. * The order of actions is important when used in combination. - * It is the responsibility of the calling class (here, ItemUpdate) - * to register UpdateAction classes in the order to which they are + * It is the responsibility of the calling class (here, {@code ItemUpdate}) + * to register {@link UpdateAction} classes in the order which they are * to be performed. * - * - * It is unfortunate that so much code needs to be borrowed - * from ItemImport as it is not reusable in private methods, etc. - * Some of this has been placed into the MetadataUtilities class - * for possible reuse elsewhere. + *

+ * It is unfortunate that so much code needs to be borrowed from + * {@link org.dspace.app.itemimport.service.ItemImportService} as it is not + * reusable in private methods, etc. Some of this has been placed into the + * {@link MetadataUtilities} class for possible reuse elsewhere. * * @author W. Hays based on a conceptual design by R. Rodgers */ @@ -73,7 +78,7 @@ public class ItemUpdate { public static final String DELETE_CONTENTS_FILE = "delete_contents"; public static String HANDLE_PREFIX = null; - public static final Map filterAliases = new HashMap(); + public static final Map filterAliases = new HashMap<>(); public static boolean verbose = false; @@ -375,7 +380,7 @@ protected void processArchive(Context context, String sourceDirPath, String item // open and process the source directory File sourceDir = new File(sourceDirPath); - if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) { + if (!sourceDir.exists() || !sourceDir.isDirectory()) { pr("Error, cannot open archive source directory " + sourceDirPath); throw new Exception("error with archive source directory " + sourceDirPath); } diff --git a/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java b/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java index 5c2138a590d2..910eb434d1d0 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java +++ b/dspace-api/src/main/java/org/dspace/app/itemupdate/MetadataUtilities.java @@ -27,10 +27,12 @@ import javax.xml.transform.TransformerException; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.commons.lang3.StringUtils; -import org.apache.xpath.XPathAPI; -import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import 
org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; @@ -170,24 +172,21 @@ public static void appendMetadata(Context context, Item item, DtoMetadata dtom, * @param docBuilder DocumentBuilder * @param is - InputStream of dublin_core.xml * @return list of DtoMetadata representing the metadata fields relating to an Item - * @throws SQLException if database error * @throws IOException if IO error * @throws ParserConfigurationException if parser config error * @throws SAXException if XML error - * @throws TransformerException if transformer error - * @throws AuthorizeException if authorization error */ public static List loadDublinCore(DocumentBuilder docBuilder, InputStream is) - throws SQLException, IOException, ParserConfigurationException, - SAXException, TransformerException, AuthorizeException { + throws IOException, XPathExpressionException, SAXException { Document document = docBuilder.parse(is); List dtomList = new ArrayList(); // Get the schema, for backward compatibility we will default to the // dublin core schema if the schema name is not available in the import file - String schema = null; - NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core"); + String schema; + XPath xPath = XPathFactory.newInstance().newXPath(); + NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET); Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema"); if (schemaAttr == null) { schema = MetadataSchemaEnum.DC.getName(); @@ -196,7 +195,7 @@ public static List loadDublinCore(DocumentBuilder docBuilder, Input } // Get the nodes corresponding to formats - NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue"); + NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET); for (int i = 0; i < dcNodes.getLength(); i++) { Node n = dcNodes.item(i); diff --git 
a/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java b/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java index ce33b6655bc6..06c2ddb48340 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/CommandRunner.java @@ -16,7 +16,7 @@ import java.util.ArrayList; import java.util.List; -import org.jdom.Document; +import org.jdom2.Document; /** * @author mwood diff --git a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java index d445f9bbf3f5..89a416bfa883 100644 --- a/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java +++ b/dspace-api/src/main/java/org/dspace/app/launcher/ScriptLauncher.java @@ -21,6 +21,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.DSpaceRunnable.StepResult; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -29,9 +30,9 @@ import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelInit; import org.dspace.services.RequestService; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.input.SAXBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.input.SAXBuilder; /** * A DSpace script launcher. 
@@ -145,8 +146,13 @@ public static int handleScript(String[] args, Document commandConfigs, private static int executeScript(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, DSpaceRunnable script) { try { - script.initialize(args, dSpaceRunnableHandler, null); - script.run(); + StepResult result = script.initialize(args, dSpaceRunnableHandler, null); + // check the StepResult, only run the script if the result is Continue; + // otherwise - for example the script is started with the help as argument, nothing is to do + if (StepResult.Continue.equals(result)) { + // runs the script, the normal initialization is successful + script.run(); + } return 0; } catch (ParseException e) { script.printHelp(); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java index 2d963dd3da79..9e28edad45b5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/Brand.java @@ -21,10 +21,10 @@ */ public class Brand { - private int brandWidth; - private int brandHeight; - private Font font; - private int xOffset; + private final int brandWidth; + private final int brandHeight; + private final Font font; + private final int xOffset; /** * Constructor to set up footer image attributes. 
@@ -92,7 +92,7 @@ public BufferedImage create(String brandLeftText, * do the text placements and preparatory work for the brand image generation * * @param brandImage a BufferedImage object where the image is created - * @param identifier and Identifier object describing what text is to be placed in what + * @param brandText an Identifier object describing what text is to be placed in what * position within the brand */ private void drawImage(BufferedImage brandImage, diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java index ae77f6048b48..91107406434e 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/BrandText.java @@ -39,7 +39,7 @@ class BrandText { * its location within a rectangular area. * * @param location one of the class location constants e.g. Identifier.BL - * @param the text associated with the location + * @param text text associated with the location */ public BrandText(String location, String text) { this.location = location; diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ExcelFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ExcelFilter.java deleted file mode 100644 index c17d168c0435..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ExcelFilter.java +++ /dev/null @@ -1,99 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -import org.apache.commons.io.IOUtils; -import org.apache.logging.log4j.Logger; -import org.apache.poi.POITextExtractor; -import org.apache.poi.extractor.ExtractorFactory; -import 
org.apache.poi.hssf.extractor.ExcelExtractor; -import org.apache.poi.xssf.extractor.XSSFExcelExtractor; -import org.dspace.content.Item; - -/* - * ExcelFilter - * - * Entries you must add to dspace.cfg: - * - * filter.plugins = blah, \ - * Excel Text Extractor - * - * plugin.named.org.dspace.app.mediafilter.FormatFilter = \ - * blah = blah, \ - * org.dspace.app.mediafilter.ExcelFilter = Excel Text Extractor - * - * #Configure each filter's input Formats - * filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML - * - */ -public class ExcelFilter extends MediaFilter { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ExcelFilter.class); - - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstream format - */ - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - public String getDescription() { - return "Extracted text"; - } - - /** - * @param item item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream getDestinationStream(Item item, InputStream source, boolean verbose) - throws Exception { - String extractedText = null; - - try { - POITextExtractor theExtractor = ExtractorFactory.createExtractor(source); - if (theExtractor instanceof ExcelExtractor) { - // for xls file - extractedText = (theExtractor).getText(); - } else if (theExtractor instanceof XSSFExcelExtractor) { - // for xlsx file - extractedText = (theExtractor).getText(); - } - } catch (Exception e) { - log.error("Error filtering bitstream: " + e.getMessage(), e); - throw e; - } - - if (extractedText != null) { - // generate an input stream with the extracted text - return 
IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8); - } - - return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/HTMLFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/HTMLFilter.java deleted file mode 100644 index 1b982cb27737..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/HTMLFilter.java +++ /dev/null @@ -1,81 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import javax.swing.text.Document; -import javax.swing.text.html.HTMLEditorKit; - -import org.dspace.content.Item; - -/* - * - * to do: helpful error messages - can't find mediafilter.cfg - can't - * instantiate filter - bitstream format doesn't exist - * - */ -public class HTMLFilter extends MediaFilter { - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - @Override - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstreamformat - */ - @Override - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - @Override - public String getDescription() { - return "Extracted text"; - } - - /** - * @param currentItem item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - // try and read the document - set to ignore character set directive, - // assuming that the input stream is already set properly (I hope) - HTMLEditorKit kit = new 
HTMLEditorKit(); - Document doc = kit.createDefaultDocument(); - - doc.putProperty("IgnoreCharsetDirective", Boolean.TRUE); - - kit.read(source, doc, 0); - - String extractedText = doc.getText(0, doc.getLength()); - - // generate an input stream with the extracted text - byte[] textBytes = extractedText.getBytes(); - ByteArrayInputStream bais = new ByteArrayInputStream(textBytes); - - return bais; // will this work? or will the byte array be out of scope? - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index 467303c3cafd..afe1bb3d75df 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,9 @@ public InputStream getDestinationStream(Item currentItem, InputStream source, bo File f2 = null; File f3 = null; try { - f2 = getImageFile(f, 0, verbose); + // Step 1: get an image from our PDF file, with PDF-specific processing options + f2 = getImageFile(f, verbose); + // Step 2: use the image above to create the final resized and rotated thumbnail f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index a79fd42d5937..408982d157e5 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -14,6 +14,9 @@ import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; +import org.apache.pdfbox.pdmodel.PDDocument; +import org.apache.pdfbox.pdmodel.PDPage; +import 
org.apache.pdfbox.pdmodel.common.PDRectangle; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Item; @@ -113,13 +116,54 @@ public File getThumbnailFile(File f, boolean verbose) return f2; } - public File getImageFile(File f, int page, boolean verbose) + /** + * Return an image from a bitstream with specific processing options for + * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to + * generate an intermediate image file for use with getThumbnailFile. + */ + public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { - File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + // Writing an intermediate file to disk is inefficient, but since we're + // doing it anyway, we should use a lossless format. IM's internal MIFF + // is lossless like PNG and TIFF, but much faster. + File f2 = new File(f.getParentFile(), f.getName() + ".miff"); f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); - String s = "[" + page + "]"; + + // Optionally override ImageMagick's default density of 72 DPI to use a + // "supersample" when creating the PDF thumbnail. Note that I prefer to + // use the getProperty() method here instead of getIntPropert() because + // the latter always returns an integer (0 in the case it's not set). I + // would prefer to keep ImageMagick's default to itself rather than for + // us to set one. Also note that the density option *must* come before + // we open the input file. + String density = configurationService.getProperty(PRE + ".density"); + if (density != null) { + op.density(Integer.valueOf(density)); + } + + // Check the PDF's MediaBox and CropBox to see if they are the same. 
+ // If not, then tell ImageMagick to use the CropBox when generating + // the thumbnail because the CropBox is generally used to define the + // area displayed when a user opens the PDF on a screen, whereas the + // MediaBox is used for print. Not all PDFs set these correctly, so + // we can use ImageMagick's default behavior unless we see an explit + // CropBox. Note: we don't need to do anything special to detect if + // the CropBox is missing or empty because pdfbox will set it to the + // same size as the MediaBox if it doesn't exist. Also note that we + // only need to check the first page, since that's what we use for + // generating the thumbnail (PDDocument uses a zero-based index). + PDPage pdfPage = PDDocument.load(f).getPage(0); + PDRectangle pdfPageMediaBox = pdfPage.getMediaBox(); + PDRectangle pdfPageCropBox = pdfPage.getCropBox(); + + // This option must come *before* we open the input file. + if (pdfPageCropBox != pdfPageMediaBox) { + op.define("pdf:use-cropbox=true"); + } + + String s = "[0]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { op.flatten(); @@ -172,20 +216,20 @@ public boolean preProcessBitstream(Context c, Item item, Bitstream source, boole if (description != null) { if (replaceRegex.matcher(description).matches()) { if (verbose) { - System.out.format("%s %s matches pattern and is replacable.%n", - description, nsrc); + System.out.format("%s %s matches pattern and is replaceable.%n", + description, n); } continue; } if (description.equals(getDescription())) { if (verbose) { System.out.format("%s %s is replaceable.%n", - getDescription(), nsrc); + getDescription(), n); } continue; } } - System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", + System.out.format("Custom thumbnail exists for %s for item %s. 
Thumbnail will not be generated.%n", nsrc, item.getHandle()); return false; } diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java new file mode 100644 index 000000000000..4221a514d7d5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickVideoThumbnailFilter.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; + +import org.dspace.content.Item; +import org.im4java.core.ConvertCmd; +import org.im4java.core.IM4JavaException; +import org.im4java.core.IMOperation; + + +/** + * Filter video bitstreams, scaling the image to be within the bounds of + * thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be + * no bigger than. Creates only JPEGs. 
+ */ +public class ImageMagickVideoThumbnailFilter extends ImageMagickThumbnailFilter { + private static final int DEFAULT_WIDTH = 180; + private static final int DEFAULT_HEIGHT = 120; + private static final int FRAME_NUMBER = 100; + + /** + * @param currentItem item + * @param source source input stream + * @param verbose verbose mode + * @return InputStream the resulting input stream + * @throws Exception if error + */ + @Override + public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) + throws Exception { + File f = inputStreamToTempFile(source, "imthumb", ".tmp"); + File f2 = null; + try { + f2 = getThumbnailFile(f, verbose); + byte[] bytes = Files.readAllBytes(f2.toPath()); + return new ByteArrayInputStream(bytes); + } finally { + //noinspection ResultOfMethodCallIgnored + f.delete(); + if (f2 != null) { + //noinspection ResultOfMethodCallIgnored + f2.delete(); + } + } + } + + @Override + public File getThumbnailFile(File f, boolean verbose) + throws IOException, InterruptedException, IM4JavaException { + File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + f2.deleteOnExit(); + ConvertCmd cmd = new ConvertCmd(); + IMOperation op = new IMOperation(); + op.autoOrient(); + op.addImage("VIDEO:" + f.getAbsolutePath() + "[" + FRAME_NUMBER + "]"); + op.thumbnail(configurationService.getIntProperty("thumbnail.maxwidth", DEFAULT_WIDTH), + configurationService.getIntProperty("thumbnail.maxheight", DEFAULT_HEIGHT)); + op.addImage(f2.getAbsolutePath()); + if (verbose) { + System.out.println("IM Thumbnail Param: " + op); + } + cmd.run(op); + return f2; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterCLITool.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterCLITool.java deleted file mode 100644 index 067419df2245..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterCLITool.java +++ /dev/null @@ -1,328 +0,0 @@ -/** - * The contents of this 
file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.MissingArgumentException; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.lang3.ArrayUtils; -import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; -import org.dspace.app.mediafilter.service.MediaFilterService; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.DSpaceObject; -import org.dspace.content.Item; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.core.SelfNamedPlugin; -import org.dspace.core.factory.CoreServiceFactory; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.services.factory.DSpaceServicesFactory; - -/** - * MediaFilterManager is the class that invokes the media/format filters over the - * repository's content. A few command line flags affect the operation of the - * MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all - * bitstreams to be processed, even if they have been before; -n noindex does not - * recreate index after processing bitstreams; -i [identifier] limits processing - * scope to a community, collection or item; and -m [max] limits processing to a - * maximum number of items. 
- */ -public class MediaFilterCLITool { - - //key (in dspace.cfg) which lists all enabled filters by name - private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - - //prefix (in dspace.cfg) for all filter properties - private static final String FILTER_PREFIX = "filter"; - - //suffix (in dspace.cfg) for input formats supported by each filter - private static final String INPUT_FORMATS_SUFFIX = "inputFormats"; - - /** - * Default constructor - */ - private MediaFilterCLITool() { } - - public static void main(String[] argv) throws Exception { - // set headless for non-gui workstations - System.setProperty("java.awt.headless", "true"); - - // create an options object and populate it - CommandLineParser parser = new DefaultParser(); - - int status = 0; - - Options options = new Options(); - - options.addOption("v", "verbose", false, - "print all extracted text and other details to STDOUT"); - options.addOption("q", "quiet", false, - "do not print anything except in the event of errors."); - options.addOption("f", "force", false, - "force all bitstreams to be processed"); - options.addOption("i", "identifier", true, - "ONLY process bitstreams belonging to identifier"); - options.addOption("m", "maximum", true, - "process no more than maximum items"); - options.addOption("h", "help", false, "help"); - - //create a "plugin" option (to specify specific MediaFilter plugins to run) - Option pluginOption = Option.builder("p") - .longOpt("plugins") - .hasArg() - .hasArgs() - .valueSeparator(',') - .desc( - "ONLY run the specified Media Filter plugin(s)\n" + - "listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" + - "Separate multiple with a comma (,)\n" + - "(e.g. 
MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")") - .build(); - options.addOption(pluginOption); - - //create a "skip" option (to specify communities/collections/items to skip) - Option skipOption = Option.builder("s") - .longOpt("skip") - .hasArg() - .hasArgs() - .valueSeparator(',') - .desc( - "SKIP the bitstreams belonging to identifier\n" + - "Separate multiple identifiers with a comma (,)\n" + - "(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)") - .build(); - options.addOption(skipOption); - - boolean isVerbose = false; - boolean isQuiet = false; - boolean isForce = false; // default to not forced - String identifier = null; // object scope limiter - int max2Process = Integer.MAX_VALUE; - Map> filterFormats = new HashMap<>(); - - CommandLine line = null; - try { - line = parser.parse(options, argv); - } catch (MissingArgumentException e) { - System.out.println("ERROR: " + e.getMessage()); - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("MediaFilterManager\n", options); - System.exit(1); - } - - if (line.hasOption('h')) { - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("MediaFilterManager\n", options); - - System.exit(0); - } - - if (line.hasOption('v')) { - isVerbose = true; - } - - isQuiet = line.hasOption('q'); - - if (line.hasOption('f')) { - isForce = true; - } - - if (line.hasOption('i')) { - identifier = line.getOptionValue('i'); - } - - if (line.hasOption('m')) { - max2Process = Integer.parseInt(line.getOptionValue('m')); - if (max2Process <= 1) { - System.out.println("Invalid maximum value '" + - line.getOptionValue('m') + "' - ignoring"); - max2Process = Integer.MAX_VALUE; - } - } - - String filterNames[] = null; - if (line.hasOption('p')) { - //specified which media filter plugins we are using - filterNames = line.getOptionValues('p'); - - if (filterNames == null || filterNames.length == 0) { //display error, since no plugins specified - System.err.println("\nERROR: -p (-plugin) 
option requires at least one plugin to be specified.\n" + - "(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n"); - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("MediaFilterManager\n", options); - System.exit(1); - } - } else { - //retrieve list of all enabled media filter plugins! - filterNames = DSpaceServicesFactory.getInstance().getConfigurationService() - .getArrayProperty(MEDIA_FILTER_PLUGINS_KEY); - } - - MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); - mediaFilterService.setForce(isForce); - mediaFilterService.setQuiet(isQuiet); - mediaFilterService.setVerbose(isVerbose); - mediaFilterService.setMax2Process(max2Process); - - //initialize an array of our enabled filters - List filterList = new ArrayList<>(); - - //set up each filter - for (int i = 0; i < filterNames.length; i++) { - //get filter of this name & add to list of filters - FormatFilter filter = (FormatFilter) CoreServiceFactory.getInstance().getPluginService() - .getNamedPlugin(FormatFilter.class, filterNames[i]); - if (filter == null) { - System.err.println( - "\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" + - filterNames[i] + "'"); - System.exit(1); - } else { - filterList.add(filter); - - String filterClassName = filter.getClass().getName(); - - String pluginName = null; - - //If this filter is a SelfNamedPlugin, - //then the input formats it accepts may differ for - //each "named" plugin that it defines. 
- //So, we have to look for every key that fits the - //following format: filter...inputFormats - if (SelfNamedPlugin.class.isAssignableFrom(filter.getClass())) { - //Get the plugin instance name for this class - pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName(); - } - - - //Retrieve our list of supported formats from dspace.cfg - //For SelfNamedPlugins, format of key is: - // filter...inputFormats - //For other MediaFilters, format of key is: - // filter..inputFormats - String[] formats = - DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty( - FILTER_PREFIX + "." + filterClassName + - (pluginName != null ? "." + pluginName : "") + - "." + INPUT_FORMATS_SUFFIX); - - //add to internal map of filters to supported formats - if (ArrayUtils.isNotEmpty(formats)) { - //For SelfNamedPlugins, map key is: - // - //For other MediaFilters, map key is just: - // - filterFormats.put(filterClassName + - (pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + - pluginName : ""), - Arrays.asList(formats)); - } - } //end if filter!=null - } //end for - - //If verbose, print out loaded mediafilter info - if (isVerbose) { - System.out.println("The following MediaFilters are enabled: "); - Iterator i = filterFormats.keySet().iterator(); - while (i.hasNext()) { - String filterName = i.next(); - System.out.println("Full Filter Name: " + filterName); - String pluginName = null; - if (filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR)) { - String[] fields = filterName.split(MediaFilterService.FILTER_PLUGIN_SEPARATOR); - filterName = fields[0]; - pluginName = fields[1]; - } - - System.out.println(filterName + - (pluginName != null ? 
" (Plugin: " + pluginName + ")" : "")); - } - } - - mediaFilterService.setFilterFormats(filterFormats); - //store our filter list into an internal array - mediaFilterService.setFilterClasses(filterList); - - - //Retrieve list of identifiers to skip (if any) - String skipIds[] = null; - if (line.hasOption('s')) { - //specified which identifiers to skip when processing - skipIds = line.getOptionValues('s'); - - if (skipIds == null || skipIds.length == 0) { //display error, since no identifiers specified to skip - System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" + - "Make sure to separate multiple identifiers with a comma!\n" + - "(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n"); - HelpFormatter myhelp = new HelpFormatter(); - myhelp.printHelp("MediaFilterManager\n", options); - System.exit(0); - } - - //save to a global skip list - mediaFilterService.setSkipList(Arrays.asList(skipIds)); - } - - Context c = null; - - try { - c = new Context(); - - // have to be super-user to do the filtering - c.turnOffAuthorisationSystem(); - - // now apply the filters - if (identifier == null) { - mediaFilterService.applyFiltersAllItems(c); - } else { - // restrict application scope to identifier - DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, identifier); - if (dso == null) { - throw new IllegalArgumentException("Cannot resolve " - + identifier + " to a DSpace object"); - } - - switch (dso.getType()) { - case Constants.COMMUNITY: - mediaFilterService.applyFiltersCommunity(c, (Community) dso); - break; - case Constants.COLLECTION: - mediaFilterService.applyFiltersCollection(c, (Collection) dso); - break; - case Constants.ITEM: - mediaFilterService.applyFiltersItem(c, (Item) dso); - break; - default: - break; - } - } - - c.complete(); - c = null; - } catch (Exception e) { - status = 1; - } finally { - if (c != null) { - c.abort(); - } - } - System.exit(status); - } -} diff --git 
a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScript.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScript.java new file mode 100644 index 000000000000..5fbbebbb28cc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScript.java @@ -0,0 +1,262 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.ArrayUtils; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.SelfNamedPlugin; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +/** + * MediaFilterManager is the class that invokes the media/format filters over the + * repository's content. A few command line flags affect the operation of the + * MFM: -v verbose outputs all extracted text to STDOUT; -f force forces all + * bitstreams to be processed, even if they have been before; -n noindex does not + * recreate index after processing bitstreams; -i [identifier] limits processing + * scope to a community, collection or item; and -m [max] limits processing to a + * maximum number of items. 
+ */ +public class MediaFilterScript extends DSpaceRunnable { + + //key (in dspace.cfg) which lists all enabled filters by name + private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; + + //prefix (in dspace.cfg) for all filter properties + private static final String FILTER_PREFIX = "filter"; + + //suffix (in dspace.cfg) for input formats supported by each filter + private static final String INPUT_FORMATS_SUFFIX = "inputFormats"; + + private boolean help; + private boolean isVerbose = false; + private boolean isQuiet = false; + private boolean isForce = false; // default to not forced + private String identifier = null; // object scope limiter + private int max2Process = Integer.MAX_VALUE; + private String[] filterNames; + private String[] skipIds = null; + private Map> filterFormats = new HashMap<>(); + + public MediaFilterScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("filter-media", MediaFilterScriptConfiguration.class); + } + + public void setup() throws ParseException { + + // set headless for non-gui workstations + System.setProperty("java.awt.headless", "true"); + + + help = commandLine.hasOption('h'); + + if (commandLine.hasOption('v')) { + isVerbose = true; + } + + isQuiet = commandLine.hasOption('q'); + + if (commandLine.hasOption('f')) { + isForce = true; + } + + if (commandLine.hasOption('i')) { + identifier = commandLine.getOptionValue('i'); + } + + if (commandLine.hasOption('m')) { + max2Process = Integer.parseInt(commandLine.getOptionValue('m')); + if (max2Process <= 1) { + handler.logWarning("Invalid maximum value '" + + commandLine.getOptionValue('m') + "' - ignoring"); + max2Process = Integer.MAX_VALUE; + } + } + + if (commandLine.hasOption('p')) { + //specified which media filter plugins we are using + filterNames = commandLine.getOptionValues('p'); + } else { + //retrieve list of all enabled media filter plugins! 
+ filterNames = DSpaceServicesFactory.getInstance().getConfigurationService() + .getArrayProperty(MEDIA_FILTER_PLUGINS_KEY); + } + + //save to a global skip list + if (commandLine.hasOption('s')) { + //specified which identifiers to skip when processing + skipIds = commandLine.getOptionValues('s'); + } + + + } + + public void internalRun() throws Exception { + if (help) { + printHelp(); + return; + } + + MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + mediaFilterService.setLogHandler(handler); + mediaFilterService.setForce(isForce); + mediaFilterService.setQuiet(isQuiet); + mediaFilterService.setVerbose(isVerbose); + mediaFilterService.setMax2Process(max2Process); + + //initialize an array of our enabled filters + List filterList = new ArrayList<>(); + + + //set up each filter + for (int i = 0; i < filterNames.length; i++) { + //get filter of this name & add to list of filters + FormatFilter filter = (FormatFilter) CoreServiceFactory.getInstance().getPluginService() + .getNamedPlugin(FormatFilter.class, filterNames[i]); + if (filter == null) { + handler.handleException("ERROR: Unknown MediaFilter specified (either from command-line or in " + + "dspace.cfg): '" + filterNames[i] + "'"); + handler.logError("ERROR: Unknown MediaFilter specified (either from command-line or in " + + "dspace.cfg): '" + filterNames[i] + "'"); + } else { + filterList.add(filter); + + String filterClassName = filter.getClass().getName(); + + String pluginName = null; + + //If this filter is a SelfNamedPlugin, + //then the input formats it accepts may differ for + //each "named" plugin that it defines. 
+ //So, we have to look for every key that fits the + //following format: filter...inputFormats + if (SelfNamedPlugin.class.isAssignableFrom(filter.getClass())) { + //Get the plugin instance name for this class + pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName(); + } + + + //Retrieve our list of supported formats from dspace.cfg + //For SelfNamedPlugins, format of key is: + // filter...inputFormats + //For other MediaFilters, format of key is: + // filter..inputFormats + String[] formats = + DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty( + FILTER_PREFIX + "." + filterClassName + + (pluginName != null ? "." + pluginName : "") + + "." + INPUT_FORMATS_SUFFIX); + + //add to internal map of filters to supported formats + if (ArrayUtils.isNotEmpty(formats)) { + //For SelfNamedPlugins, map key is: + // + //For other MediaFilters, map key is just: + // + filterFormats.put(filterClassName + + (pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + + pluginName : ""), + Arrays.asList(formats)); + } + } //end if filter!=null + } //end for + + //If verbose, print out loaded mediafilter info + if (isVerbose) { + handler.logInfo("The following MediaFilters are enabled: "); + Iterator i = filterFormats.keySet().iterator(); + while (i.hasNext()) { + String filterName = i.next(); + handler.logInfo("Full Filter Name: " + filterName); + String pluginName = null; + if (filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR)) { + String[] fields = filterName.split(MediaFilterService.FILTER_PLUGIN_SEPARATOR); + filterName = fields[0]; + pluginName = fields[1]; + } + + handler.logInfo(filterName + (pluginName != null ? 
" (Plugin: " + pluginName + ")" : "")); + } + } + + mediaFilterService.setFilterFormats(filterFormats); + //store our filter list into an internal array + mediaFilterService.setFilterClasses(filterList); + + + //Retrieve list of identifiers to skip (if any) + + if (skipIds != null && skipIds.length > 0) { + //save to a global skip list + mediaFilterService.setSkipList(Arrays.asList(skipIds)); + } + + Context c = null; + + try { + c = new Context(); + + // have to be super-user to do the filtering + c.turnOffAuthorisationSystem(); + + // now apply the filters + if (identifier == null) { + mediaFilterService.applyFiltersAllItems(c); + } else { + // restrict application scope to identifier + DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, identifier); + if (dso == null) { + throw new IllegalArgumentException("Cannot resolve " + + identifier + " to a DSpace object"); + } + + switch (dso.getType()) { + case Constants.COMMUNITY: + mediaFilterService.applyFiltersCommunity(c, (Community) dso); + break; + case Constants.COLLECTION: + mediaFilterService.applyFiltersCollection(c, (Collection) dso); + break; + case Constants.ITEM: + mediaFilterService.applyFiltersItem(c, (Item) dso); + break; + default: + break; + } + } + + c.complete(); + c = null; + } catch (Exception e) { + handler.handleException(e); + } finally { + if (c != null) { + c.abort(); + } + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java new file mode 100644 index 000000000000..867e684db86b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +public class MediaFilterScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + Options options = new Options(); + options.addOption("v", "verbose", false, "print all extracted text and other details to STDOUT"); + options.addOption("q", "quiet", false, "do not print anything except in the event of errors."); + options.addOption("f", "force", false, "force all bitstreams to be processed"); + options.addOption("i", "identifier", true, "ONLY process bitstreams belonging to identifier"); + options.addOption("m", "maximum", true, "process no more than maximum items"); + options.addOption("h", "help", false, "help"); + + Option pluginOption = Option.builder("p") + .longOpt("plugins") + .hasArg() + .hasArgs() + .valueSeparator(',') + .desc( + "ONLY run the specified Media Filter plugin(s)\n" + + "listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" + + "Separate multiple with a comma (,)\n" + + "(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text" + + " Extractor\")") + .build(); + options.addOption(pluginOption); + + Option skipOption = Option.builder("s") + .longOpt("skip") + .hasArg() + .hasArgs() + .valueSeparator(',') + .desc( + "SKIP the bitstreams belonging to identifier\n" + + "Separate multiple identifiers with a comma (,)\n" + + "(e.g. 
MediaFilterManager -s \n 123456789/34,123456789/323)") + .build(); + options.addOption(skipOption); + + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java index eb5eaaa255ef..b50fb22355a3 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterServiceImpl.java @@ -8,13 +8,18 @@ package org.dspace.app.mediafilter; import java.io.InputStream; +import java.sql.SQLException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.BitstreamFormat; @@ -34,7 +39,9 @@ import org.dspace.core.SelfNamedPlugin; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; import org.dspace.services.ConfigurationService; +import org.dspace.util.ThrowableUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -67,6 +74,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB @Autowired(required = true) protected ConfigurationService configurationService; + protected DSpaceRunnableHandler handler; + protected int max2Process = Integer.MAX_VALUE; // maximum number items to process protected int processed = 0; // number items processed @@ -218,23 +227,9 @@ public boolean filterBitstream(Context context, Item myItem, filtered = true; } } catch 
(Exception e) { - String handle = myItem.getHandle(); - List bundles = myBitstream.getBundles(); - long size = myBitstream.getSizeBytes(); - String checksum = myBitstream.getChecksum() + " (" + myBitstream.getChecksumAlgorithm() + ")"; - int assetstore = myBitstream.getStoreNumber(); - // Printout helpful information to find the errored bitstream. - System.out.println("ERROR filtering, skipping bitstream:\n"); - System.out.println("\tItem Handle: " + handle); - for (Bundle bundle : bundles) { - System.out.println("\tBundle Name: " + bundle.getName()); - } - System.out.println("\tFile Size: " + size); - System.out.println("\tChecksum: " + checksum); - System.out.println("\tAsset Store: " + assetstore); - System.out.println(e); - e.printStackTrace(); + logError(formatBitstreamDetails(myItem.getHandle(), myBitstream)); + logError(ThrowableUtils.formatCauseChain(e)); } } else if (filterClass instanceof SelfRegisterInputFormats) { // Filter implements self registration, so check to see if it should be applied @@ -287,7 +282,7 @@ public boolean filterBitstream(Context context, Item myItem, filtered = true; } } catch (Exception e) { - System.out.println("ERROR filtering, skipping bitstream #" + logError("ERROR filtering, skipping bitstream #" + myBitstream.getID() + " " + e); e.printStackTrace(); } @@ -312,27 +307,27 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo // check if destination bitstream exists Bundle existingBundle = null; - Bitstream existingBitstream = null; + List existingBitstreams = new ArrayList<>(); List bundles = itemService.getBundles(item, formatFilter.getBundleName()); - if (bundles.size() > 0) { - // only finds the last match (FIXME?) 
+ if (!bundles.isEmpty()) { + // only finds the last matching bundle and all matching bitstreams in the proper bundle(s) for (Bundle bundle : bundles) { List bitstreams = bundle.getBitstreams(); for (Bitstream bitstream : bitstreams) { if (bitstream.getName().trim().equals(newName.trim())) { existingBundle = bundle; - existingBitstream = bitstream; + existingBitstreams.add(bitstream); } } } } // if exists and overwrite = false, exit - if (!overWrite && (existingBitstream != null)) { + if (!overWrite && (!existingBitstreams.isEmpty())) { if (!isQuiet) { - System.out.println("SKIPPED: bitstream " + source.getID() + logInfo("SKIPPED: bitstream " + source.getID() + " (item: " + item.getHandle() + ") because '" + newName + "' already exists"); } @@ -340,11 +335,11 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo } if (isVerbose) { - System.out.println("PROCESSING: bitstream " + source.getID() + logInfo("PROCESSING: bitstream " + source.getID() + " (item: " + item.getHandle() + ")"); } - System.out.println("File: " + newName); + logInfo("File: " + newName); // start filtering of the bitstream, using try with resource to close all InputStreams properly try ( @@ -356,14 +351,14 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo ) { if (destStream == null) { if (!isQuiet) { - System.out.println("SKIPPED: bitstream " + source.getID() + logInfo("SKIPPED: bitstream " + source.getID() + " (item: " + item.getHandle() + ") because filtering was unsuccessful"); } return false; } Bundle targetBundle; // bundle we're modifying - if (bundles.size() < 1) { + if (bundles.isEmpty()) { // create new bundle if needed targetBundle = bundleService.create(context, item, formatFilter.getBundleName()); } else { @@ -385,40 +380,94 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo bitstreamService.update(context, b); //Set permissions on the derivative bitstream - //- First remove any existing 
policies - authorizeService.removeAllPolicies(context, b); - - //- Determine if this is a public-derivative format - if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { - //- Set derivative bitstream to be publicly accessible - Group anonymous = groupService.findByName(context, Group.ANONYMOUS); - authorizeService.addPolicy(context, b, Constants.READ, anonymous); - } else { - //- Inherit policies from the source bitstream - authorizeService.inheritPolicies(context, source, b); - } + updatePoliciesOfDerivativeBitstream(context, b, formatFilter, source); //do post-processing of the generated bitstream formatFilter.postProcessBitstream(context, item, b); } catch (OutOfMemoryError oome) { - System.out.println("!!! OutOfMemoryError !!!"); + logError("!!! OutOfMemoryError !!!"); + logError(formatBitstreamDetails(item.getHandle(), source)); } - // fixme - set date? // we are overwriting, so remove old bitstream - if (existingBitstream != null) { + for (Bitstream existingBitstream : existingBitstreams) { bundleService.removeBitstream(context, existingBundle, existingBitstream); } if (!isQuiet) { - System.out.println("FILTERED: bitstream " + source.getID() + logInfo("FILTERED: bitstream " + source.getID() + " (item: " + item.getHandle() + ") and created '" + newName + "'"); } return true; } + @Override + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException { + + if (filterClasses == null) { + return; + } + + for (FormatFilter formatFilter : filterClasses) { + for (Bitstream bitstream : findDerivativeBitstreams(item, source, formatFilter)) { + updatePoliciesOfDerivativeBitstream(context, bitstream, formatFilter, source); + } + } + } + + /** + * find derivative bitstreams related to source bitstream + * + * @param item item containing bitstreams + * @param source source bitstream + * @param formatFilter formatFilter + * @return list of derivative bitstreams from 
source bitstream + * @throws SQLException If something goes wrong in the database + */ + private List findDerivativeBitstreams(Item item, Bitstream source, FormatFilter formatFilter) + throws SQLException { + + String bitstreamName = formatFilter.getFilteredName(source.getName()); + List bundles = itemService.getBundles(item, formatFilter.getBundleName()); + + return bundles.stream() + .flatMap(bundle -> + bundle.getBitstreams().stream()) + .filter(bitstream -> + StringUtils.equals(bitstream.getName().trim(), bitstreamName.trim())) + .collect(Collectors.toList()); + } + + /** + * update resource polices of derivative bitstreams. + * by remove all resource policies and + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. + * + * @param context the context + * @param bitstream derivative bitstream + * @param formatFilter formatFilter + * @param source the source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + private void updatePoliciesOfDerivativeBitstream(Context context, Bitstream bitstream, FormatFilter formatFilter, + Bitstream source) throws SQLException, AuthorizeException { + + authorizeService.removeAllPolicies(context, bitstream); + + if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) { + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + authorizeService.addPolicy(context, bitstream, Constants.READ, anonymous); + } else { + authorizeService.replaceAllPolicies(context, source, bitstream); + } + } + @Override public Item getCurrentItem() { return currentItem; @@ -428,7 +477,7 @@ public Item getCurrentItem() { public boolean inSkipList(String identifier) { if (skipList != null && skipList.contains(identifier)) { if (!isQuiet) { - System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier); + logInfo("SKIP-LIST: 
skipped bitstreams within identifier " + identifier); } return true; } else { @@ -436,6 +485,59 @@ public boolean inSkipList(String identifier) { } } + /** + * Describe a Bitstream in detail. Format a single line of text with + * information such as Bitstore index, backing file ID, size, checksum, + * enclosing Item and Bundles. + * + * @param itemHandle Handle of the Item by which we found the Bitstream. + * @param bitstream the Bitstream to be described. + * @return Bitstream details. + */ + private String formatBitstreamDetails(String itemHandle, + Bitstream bitstream) { + List bundles; + try { + bundles = bitstream.getBundles(); + } catch (SQLException ex) { + logError("Unexpected error fetching Bundles", ex); + bundles = Collections.EMPTY_LIST; + } + StringBuilder sb = new StringBuilder("ERROR filtering, skipping bitstream:\n"); + sb.append("\tItem Handle: ").append(itemHandle); + for (Bundle bundle : bundles) { + sb.append("\tBundle Name: ").append(bundle.getName()); + } + sb.append("\tFile Size: ").append(bitstream.getSizeBytes()); + sb.append("\tChecksum: ").append(bitstream.getChecksum()) + .append(" (").append(bitstream.getChecksumAlgorithm()).append(')'); + sb.append("\tAsset Store: ").append(bitstream.getStoreNumber()); + sb.append("\tInternal ID: ").append(bitstream.getInternalId()); + return sb.toString(); + } + + private void logInfo(String message) { + if (handler != null) { + handler.logInfo(message); + } else { + System.out.println(message); + } + } + private void logError(String message) { + if (handler != null) { + handler.logError(message); + } else { + System.out.println(message); + } + } + private void logError(String message, Exception e) { + if (handler != null) { + handler.logError(message, e); + } else { + System.out.println(message); + } + } + @Override public void setVerbose(boolean isVerbose) { this.isVerbose = isVerbose; @@ -470,4 +572,9 @@ public void setSkipList(List skipList) { public void setFilterFormats(Map> filterFormats) { 
this.filterFormats = filterFormats; } + + @Override + public void setLogHandler(DSpaceRunnableHandler handler) { + this.handler = handler; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/PDFFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/PDFFilter.java deleted file mode 100644 index c90d7c5a6e97..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/PDFFilter.java +++ /dev/null @@ -1,137 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.InputStream; -import java.io.OutputStreamWriter; -import java.io.Writer; - -import org.apache.logging.log4j.Logger; -import org.apache.pdfbox.pdmodel.PDDocument; -import org.apache.pdfbox.pdmodel.encryption.InvalidPasswordException; -import org.apache.pdfbox.text.PDFTextStripper; -import org.dspace.content.Item; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; - -/* - * - * to do: helpful error messages - can't find mediafilter.cfg - can't - * instantiate filter - bitstream format doesn't exist - * - */ -public class PDFFilter extends MediaFilter { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PDFFilter.class); - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - @Override - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstreamformat - */ - @Override - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - @Override 
- public String getDescription() { - return "Extracted text"; - } - - /** - * @param currentItem item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); - try { - boolean useTemporaryFile = configurationService.getBooleanProperty("pdffilter.largepdfs", false); - - // get input stream from bitstream - // pass to filter, get string back - PDFTextStripper pts = new PDFTextStripper(); - pts.setSortByPosition(true); - PDDocument pdfDoc = null; - Writer writer = null; - File tempTextFile = null; - ByteArrayOutputStream byteStream = null; - - if (useTemporaryFile) { - tempTextFile = File.createTempFile("dspacepdfextract" + source.hashCode(), ".txt"); - tempTextFile.deleteOnExit(); - writer = new OutputStreamWriter(new FileOutputStream(tempTextFile)); - } else { - byteStream = new ByteArrayOutputStream(); - writer = new OutputStreamWriter(byteStream); - } - - try { - pdfDoc = PDDocument.load(source); - pts.writeText(pdfDoc, writer); - } catch (InvalidPasswordException ex) { - log.error("PDF is encrypted. 
Cannot extract text (item: {})", - () -> currentItem.getHandle()); - return null; - } finally { - try { - if (pdfDoc != null) { - pdfDoc.close(); - } - } catch (Exception e) { - log.error("Error closing PDF file: " + e.getMessage(), e); - } - - try { - writer.close(); - } catch (Exception e) { - log.error("Error closing temporary extract file: " + e.getMessage(), e); - } - } - - if (useTemporaryFile) { - return new FileInputStream(tempTextFile); - } else { - byte[] bytes = byteStream.toByteArray(); - return new ByteArrayInputStream(bytes); - } - } catch (OutOfMemoryError oome) { - log.error("Error parsing PDF document " + oome.getMessage(), oome); - if (!configurationService.getBooleanProperty("pdffilter.skiponmemoryexception", false)) { - throw oome; - } - } - - return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/PoiWordFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/PoiWordFilter.java deleted file mode 100644 index 158f52f1f9cc..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/PoiWordFilter.java +++ /dev/null @@ -1,71 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; - -import org.apache.poi.POITextExtractor; -import org.apache.poi.extractor.ExtractorFactory; -import org.apache.poi.openxml4j.exceptions.OpenXML4JException; -import org.apache.xmlbeans.XmlException; -import org.dspace.content.Item; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Extract flat text from Microsoft Word documents (.doc, .docx). 
- */ -public class PoiWordFilter - extends MediaFilter { - private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class); - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - @Override - public String getBundleName() { - return "TEXT"; - } - - @Override - public String getFormatString() { - return "Text"; - } - - @Override - public String getDescription() { - return "Extracted text"; - } - - @Override - public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - String text; - try { - // get input stream from bitstream, pass to filter, get string back - POITextExtractor extractor = ExtractorFactory.createExtractor(source); - text = extractor.getText(); - } catch (IOException | OpenXML4JException | XmlException e) { - System.err.format("Invalid File Format: %s%n", e.getMessage()); - LOG.error("Unable to parse the bitstream: ", e); - throw e; - } - - // if verbose flag is set, print out extracted text to STDOUT - if (verbose) { - System.out.println(text); - } - - // return the extracted text as a stream. 
- return new ByteArrayInputStream(text.getBytes()); - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/PowerPointFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/PowerPointFilter.java deleted file mode 100644 index 86b7096f68f9..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/PowerPointFilter.java +++ /dev/null @@ -1,113 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -import org.apache.logging.log4j.Logger; -import org.apache.poi.POITextExtractor; -import org.apache.poi.extractor.ExtractorFactory; -import org.apache.poi.hslf.extractor.PowerPointExtractor; -import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor; -import org.dspace.content.Item; - -/* - * TODO: Allow user to configure extraction of only text or only notes - * - */ -public class PowerPointFilter extends MediaFilter { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PowerPointFilter.class); - - @Override - public String getFilteredName(String oldFilename) { - return oldFilename + ".txt"; - } - - /** - * @return String bundle name - */ - @Override - public String getBundleName() { - return "TEXT"; - } - - /** - * @return String bitstream format - * - * TODO: Check that this is correct - */ - @Override - public String getFormatString() { - return "Text"; - } - - /** - * @return String description - */ - @Override - public String getDescription() { - return "Extracted text"; - } - - /** - * @param currentItem item - * @param source source input stream - * @param verbose verbose mode - * @return InputStream the resulting input stream - * @throws Exception if error - */ - @Override - public InputStream 
getDestinationStream(Item currentItem, InputStream source, boolean verbose) - throws Exception { - - try { - - String extractedText = null; - new ExtractorFactory(); - POITextExtractor pptExtractor = ExtractorFactory - .createExtractor(source); - - // PowerPoint XML files and legacy format PowerPoint files - // require different classes and APIs for text extraction - - // If this is a PowerPoint XML file, extract accordingly - if (pptExtractor instanceof XSLFPowerPointExtractor) { - - // The true method arguments indicate that text from - // the slides and the notes is desired - extractedText = ((XSLFPowerPointExtractor) pptExtractor) - .getText(true, true); - } else if (pptExtractor instanceof PowerPointExtractor) { // Legacy PowerPoint files - - extractedText = ((PowerPointExtractor) pptExtractor).getText() - + " " + ((PowerPointExtractor) pptExtractor).getNotes(); - - } - if (extractedText != null) { - // if verbose flag is set, print out extracted text - // to STDOUT - if (verbose) { - System.out.println(extractedText); - } - - // generate an input stream with the extracted text - byte[] textBytes = extractedText.getBytes(); - ByteArrayInputStream bais = new ByteArrayInputStream(textBytes); - - return bais; - } - } catch (Exception e) { - log.error("Error filtering bitstream: " + e.getMessage(), e); - throw e; - } - - return null; - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java new file mode 100644 index 000000000000..e83bf706ed02 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/TikaTextExtractionFilter.java @@ -0,0 +1,183 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import 
java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.tika.Tika; +import org.apache.tika.exception.TikaException; +import org.apache.tika.metadata.Metadata; +import org.apache.tika.parser.AutoDetectParser; +import org.apache.tika.sax.BodyContentHandler; +import org.apache.tika.sax.ContentHandlerDecorator; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.xml.sax.SAXException; + +/** + * Text Extraction media filter which uses Apache Tika to extract text from a large number of file formats (including + * all Microsoft formats, PDF, HTML, Text, etc). For a more complete list of file formats supported by Tika see the + * Tika documentation: https://tika.apache.org/2.3.0/formats.html + */ +public class TikaTextExtractionFilter + extends MediaFilter { + private final static Logger log = LogManager.getLogger(); + + @Override + public String getFilteredName(String oldFilename) { + return oldFilename + ".txt"; + } + + @Override + public String getBundleName() { + return "TEXT"; + } + + @Override + public String getFormatString() { + return "Text"; + } + + @Override + public String getDescription() { + return "Extracted text"; + } + + @Override + public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose) + throws Exception { + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + boolean useTemporaryFile = configurationService.getBooleanProperty("textextractor.use-temp-file", false); + + if (useTemporaryFile) { + // Extract text out of source file using a temp file, returning 
results as InputStream + return extractUsingTempFile(source, verbose); + } + + // Not using temporary file. We'll use Tika's default in-memory parsing. + // Get maximum characters to extract. Default is 100,000 chars, which is also Tika's default setting. + String extractedText; + int maxChars = configurationService.getIntProperty("textextractor.max-chars", 100000); + try { + // Use Tika to extract text from input. Tika will automatically detect the file type. + Tika tika = new Tika(); + tika.setMaxStringLength(maxChars); // Tell Tika the maximum number of characters to extract + extractedText = tika.parseToString(source); + } catch (IOException e) { + System.err.format("Unable to extract text from bitstream in Item %s%n", currentItem.getID().toString()); + e.printStackTrace(); + log.error("Unable to extract text from bitstream in Item {}", currentItem.getID().toString(), e); + throw e; + } catch (OutOfMemoryError oe) { + System.err.format("OutOfMemoryError occurred when extracting text from bitstream in Item %s. " + + "You may wish to enable 'textextractor.use-temp-file'.%n", currentItem.getID().toString()); + oe.printStackTrace(); + log.error("OutOfMemoryError occurred when extracting text from bitstream in Item {}. " + + "You may wish to enable 'textextractor.use-temp-file'.", currentItem.getID().toString(), oe); + throw oe; + } + + if (StringUtils.isNotEmpty(extractedText)) { + // if verbose flag is set, print out extracted text to STDOUT + if (verbose) { + System.out.println("(Verbose mode) Extracted text:"); + System.out.println(extractedText); + } + + // return the extracted text as a UTF-8 stream. + return new ByteArrayInputStream(extractedText.getBytes(StandardCharsets.UTF_8)); + } + return null; + } + + /** + * Extracts the text out of a given source InputStream, using a temporary file. This decreases the amount of memory + * necessary for text extraction, but can be slower as it requires writing extracted text to a temporary file. 
+ * @param source source InputStream + * @param verbose verbose mode enabled/disabled + * @return InputStream for temporary file containing extracted text + * @throws IOException + * @throws SAXException + * @throws TikaException + */ + private InputStream extractUsingTempFile(InputStream source, boolean verbose) + throws IOException, TikaException, SAXException { + File tempExtractedTextFile = File.createTempFile("dspacetextextract" + source.hashCode(), ".txt"); + + if (verbose) { + System.out.println("(Verbose mode) Extracted text was written to temporary file at " + + tempExtractedTextFile.getAbsolutePath()); + } else { + tempExtractedTextFile.deleteOnExit(); + } + + // Open temp file for writing + try (FileWriter writer = new FileWriter(tempExtractedTextFile, StandardCharsets.UTF_8)) { + // Initialize a custom ContentHandlerDecorator which is a BodyContentHandler. + // This mimics the behavior of Tika().parseToString(), which only extracts text from the body of the file. + // This custom Handler writes any extracted text to the temp file. + ContentHandlerDecorator handler = new BodyContentHandler(new ContentHandlerDecorator() { + /** + * Write all extracted characters directly to the temp file. + */ + @Override + public void characters(char[] ch, int start, int length) throws SAXException { + try { + writer.append(new String(ch), start, length); + } catch (IOException e) { + String errorMsg = String.format("Could not append to temporary file at %s " + + "when performing text extraction", + tempExtractedTextFile.getAbsolutePath()); + log.error(errorMsg, e); + throw new SAXException(errorMsg, e); + } + } + + /** + * Write all ignorable whitespace directly to the temp file. + * This mimics the behaviour of Tika().parseToString() which extracts ignorableWhitespace characters + * (like blank lines, indentations, etc.), so that we get the same extracted text either way. 
+ */ + @Override + public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException { + try { + writer.append(new String(ch), start, length); + } catch (IOException e) { + String errorMsg = String.format("Could not append to temporary file at %s " + + "when performing text extraction", + tempExtractedTextFile.getAbsolutePath()); + log.error(errorMsg, e); + throw new SAXException(errorMsg, e); + } + } + }); + + AutoDetectParser parser = new AutoDetectParser(); + Metadata metadata = new Metadata(); + // parse our source InputStream using the above custom handler + parser.parse(source, handler, metadata); + } + + // At this point, all extracted text is written to our temp file. So, return a FileInputStream for that file + return new FileInputStream(tempExtractedTextFile); + } + + + + +} diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java index 83198a50831a..bc92ff521098 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/service/MediaFilterService.java @@ -7,15 +7,18 @@ */ package org.dspace.app.mediafilter.service; +import java.sql.SQLException; import java.util.List; import java.util.Map; import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.core.Context; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * MediaFilterManager is the class that invokes the media/format filters over the @@ -90,6 +93,22 @@ public void applyFiltersCollection(Context context, Collection collection) public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter) throws Exception; + /** 
+ * update resource polices of derivative bitstreams + * related to source bitstream. + * set derivative bitstreams to be publicly accessible or + * replace derivative bitstreams policies using + * the same in the source bitstream. + * + * @param context context + * @param item item containing bitstreams + * @param source source bitstream + * @throws SQLException If something goes wrong in the database + * @throws AuthorizeException if authorization error + */ + public void updatePoliciesOfDerivativeBitstreams(Context context, Item item, Bitstream source) + throws SQLException, AuthorizeException; + /** * Return the item that is currently being processed/filtered * by the MediaFilterManager. @@ -124,4 +143,10 @@ public boolean processBitstream(Context context, Item item, Bitstream source, Fo public void setSkipList(List skipList); public void setFilterFormats(Map> filterFormats); + + /** + * Set the log handler used in the DSpace scripts and processes framework + * @param handler + */ + public void setLogHandler(DSpaceRunnableHandler handler); } diff --git a/dspace-api/src/main/java/org/dspace/app/packager/Packager.java b/dspace-api/src/main/java/org/dspace/app/packager/Packager.java index 0e985bd244ae..21d156268609 100644 --- a/dspace-api/src/main/java/org/dspace/app/packager/Packager.java +++ b/dspace-api/src/main/java/org/dspace/app/packager/Packager.java @@ -631,7 +631,7 @@ protected void disseminate(Context context, PackageDisseminator dip, //otherwise, just disseminate a single object to a single package file dip.disseminate(context, dso, pkgParams, pkgFile); - if (pkgFile != null && pkgFile.exists()) { + if (pkgFile.exists()) { System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath()); } } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java new file mode 100644 index 
000000000000..135406069ae3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategy.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.requestitem; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.springframework.lang.NonNull; + +/** + * Derive request recipients from groups of the Collection which owns an Item. + * The list will include all members of the administrators group. If the + * resulting list is empty, delegates to {@link RequestItemHelpdeskStrategy}. + * + * @author Mark H. Wood + */ +public class CollectionAdministratorsRequestItemStrategy + extends RequestItemHelpdeskStrategy { + @Override + @NonNull + public List getRequestItemAuthor(Context context, + Item item) + throws SQLException { + List recipients = new ArrayList<>(); + Collection collection = item.getOwningCollection(); + for (EPerson admin : collection.getAdministrators().getMembers()) { + recipients.add(new RequestItemAuthor(admin)); + } + if (recipients.isEmpty()) { + return super.getRequestItemAuthor(context, item); + } else { + return recipients; + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java new file mode 100644 index 000000000000..8292c1a72835 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/CombiningRequestItemStrategy.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and 
NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.lang.NonNull; +import org.springframework.util.Assert; + +/** + * Assemble a list of recipients from the results of other strategies. + * The list of strategy classes is injected as the constructor argument + * {@code strategies}. + * If the strategy list is not configured, returns an empty List. + * + * @author Mark H. Wood + */ +public class CombiningRequestItemStrategy + implements RequestItemAuthorExtractor { + /** The strategies to combine. */ + private final List strategies; + + /** + * Initialize a combination of strategies. + * @param strategies the author extraction strategies to combine. + */ + public CombiningRequestItemStrategy(@NonNull List strategies) { + Assert.notNull(strategies, "Strategy list may not be null"); + this.strategies = strategies; + } + + /** + * Do not call. 
+ * @throws IllegalArgumentException always + */ + private CombiningRequestItemStrategy() { + throw new IllegalArgumentException(); + } + + @Override + @NonNull + public List getRequestItemAuthor(Context context, Item item) + throws SQLException { + List recipients = new ArrayList<>(); + + for (RequestItemAuthorExtractor strategy : strategies) { + recipients.addAll(strategy.getRequestItemAuthor(context, item)); + } + + return recipients; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java index 8a77a591b7d0..cdefd1298c6e 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItem.java @@ -27,13 +27,12 @@ import org.dspace.core.ReloadableEntity; /** - * Object representing an Item Request + * Object representing an Item Request. */ @Entity @Table(name = "requestitem") public class RequestItem implements ReloadableEntity { - @Id @Column(name = "requestitem_id") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "requestitem_seq") @@ -54,8 +53,6 @@ public class RequestItem implements ReloadableEntity { @Column(name = "request_name", length = 64) private String reqName; - // @Column(name = "request_message") -// @Lob @Column(name = "request_message", columnDefinition = "text") private String reqMessage; @@ -82,8 +79,8 @@ public class RequestItem implements ReloadableEntity { /** * Protected constructor, create object using: - * {@link org.dspace.app.requestitem.service.RequestItemService#createRequest(Context, Bitstream, Item, - * boolean, String, String, String)} + * {@link org.dspace.app.requestitem.service.RequestItemService#createRequest( + * Context, Bitstream, Item, boolean, String, String, String)} */ protected RequestItem() { } @@ -97,6 +94,9 @@ void setAllfiles(boolean allfiles) { this.allfiles = allfiles; } + /** + * @return {@code true} if all of 
the Item's files are requested. + */ public boolean isAllfiles() { return allfiles; } @@ -105,6 +105,9 @@ void setReqMessage(String reqMessage) { this.reqMessage = reqMessage; } + /** + * @return a message from the requester. + */ public String getReqMessage() { return reqMessage; } @@ -113,6 +116,9 @@ void setReqName(String reqName) { this.reqName = reqName; } + /** + * @return Human-readable name of the user requesting access. + */ public String getReqName() { return reqName; } @@ -121,6 +127,9 @@ void setReqEmail(String reqEmail) { this.reqEmail = reqEmail; } + /** + * @return address of the user requesting access. + */ public String getReqEmail() { return reqEmail; } @@ -129,6 +138,9 @@ void setToken(String token) { this.token = token; } + /** + * @return a unique request identifier which can be emailed. + */ public String getToken() { return token; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java index 49e26fe00bd3..a189e4a5efdd 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthor.java @@ -11,20 +11,31 @@ /** * Simple DTO to transfer data about the corresponding author for the Request - * Copy feature + * Copy feature. * * @author Andrea Bollini */ public class RequestItemAuthor { - private String fullName; - private String email; + private final String fullName; + private final String email; + /** + * Construct an author record from given data. + * + * @param fullName the author's full name. + * @param email the author's email address. + */ public RequestItemAuthor(String fullName, String email) { super(); this.fullName = fullName; this.email = email; } + /** + * Construct an author from an EPerson's metadata. + * + * @param ePerson the EPerson. 
+ */ public RequestItemAuthor(EPerson ePerson) { super(); this.fullName = ePerson.getFullName(); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java index 9b66030e9030..5c6e48ee3f85 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemAuthorExtractor.java @@ -8,26 +8,28 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.List; import org.dspace.content.Item; import org.dspace.core.Context; +import org.springframework.lang.NonNull; /** - * Interface to abstract the strategy for select the author to contact for - * request copy + * Interface to abstract the strategy for selecting the author to contact for + * request copy. * * @author Andrea Bollini */ public interface RequestItemAuthorExtractor { - /** - * Retrieve the auhtor to contact for a request copy of the give item. + * Retrieve the author to contact for requesting a copy of the given item. * * @param context DSpace context object * @param item item to request - * @return An object containing name an email address to send the request to - * or null if no valid email address was found. + * @return Names and email addresses to send the request to. 
* @throws SQLException if database error */ - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException; + @NonNull + public List getRequestItemAuthor(Context context, Item item) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java new file mode 100644 index 000000000000..6499c45a7830 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -0,0 +1,300 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.requestitem; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.List; +import javax.annotation.ManagedBean; +import javax.inject.Inject; +import javax.inject.Singleton; +import javax.mail.MessagingException; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.app.requestitem.service.RequestItemService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.core.Email; +import org.dspace.core.I18nUtil; +import org.dspace.core.LogHelper; +import org.dspace.eperson.EPerson; +import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; + +/** + * Send item requests and responses by email. + * + *

The "strategy" by which approvers are chosen is in an implementation of + * {@link RequestItemAuthorExtractor} which is injected by the name + * {@code requestItemAuthorExtractor}. See the DI configuration documents. + * + * @author Mark H. Wood + */ +@Singleton +@ManagedBean +public class RequestItemEmailNotifier { + private static final Logger LOG = LogManager.getLogger(); + + @Inject + protected BitstreamService bitstreamService; + + @Inject + protected ConfigurationService configurationService; + + @Inject + protected HandleService handleService; + + @Inject + protected RequestItemService requestItemService; + + protected final RequestItemAuthorExtractor requestItemAuthorExtractor; + + @Inject + public RequestItemEmailNotifier(RequestItemAuthorExtractor requestItemAuthorExtractor) { + this.requestItemAuthorExtractor = requestItemAuthorExtractor; + } + + /** + * Send the request to the approver(s). + * + * @param context current DSpace session. + * @param ri the request. + * @param responseLink link back to DSpace to send the response. + * @throws IOException passed through. + * @throws SQLException if the message was not sent. + */ + public void sendRequest(Context context, RequestItem ri, String responseLink) + throws IOException, SQLException { + // Who is making this request? + List authors = requestItemAuthorExtractor + .getRequestItemAuthor(context, ri.getItem()); + + // Build an email to the approver. + Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), + "request_item.author")); + for (RequestItemAuthor author : authors) { + email.addRecipient(author.getEmail()); + } + email.setReplyTo(ri.getReqEmail()); // Requester's address + + email.addArgument(ri.getReqName()); // {0} Requester's name + + email.addArgument(ri.getReqEmail()); // {1} Requester's address + + email.addArgument(ri.isAllfiles() // {2} All bitstreams or just one? + ? 
I18nUtil.getMessage("itemRequest.all") : ri.getBitstream().getName()); + + email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {3} + + email.addArgument(ri.getItem().getName()); // {4} requested item's title + + email.addArgument(ri.getReqMessage()); // {5} message from requester + + email.addArgument(responseLink); // {6} Link back to DSpace for action + + StringBuilder names = new StringBuilder(); + StringBuilder addresses = new StringBuilder(); + for (RequestItemAuthor author : authors) { + if (names.length() > 0) { + names.append("; "); + addresses.append("; "); + } + names.append(author.getFullName()); + addresses.append(author.getEmail()); + } + email.addArgument(names.toString()); // {7} corresponding author name + email.addArgument(addresses.toString()); // {8} corresponding author email + + email.addArgument(configurationService.getProperty("dspace.name")); // {9} + + email.addArgument(configurationService.getProperty("mail.helpdesk")); // {10} + + // Send the email. + try { + email.send(); + Bitstream bitstream = ri.getBitstream(); + String bitstreamID; + if (null == bitstream) { + bitstreamID = "null"; + } else { + bitstreamID = ri.getBitstream().getID().toString(); + } + LOG.info(LogHelper.getHeader(context, + "sent_email_requestItem", + "submitter_id={},bitstream_id={},requestEmail={}"), + ri.getReqEmail(), bitstreamID, ri.getReqEmail()); + } catch (MessagingException e) { + LOG.warn(LogHelper.getHeader(context, + "error_mailing_requestItem", e.getMessage())); + throw new IOException("Request not sent: " + e.getMessage()); + } + } + + /** + * Send the approver's response back to the requester, with files attached + * if approved. + * + * @param context current DSpace session. + * @param ri the request. + * @param subject email subject header value. + * @param message email body (may be empty). + * @throws IOException if sending failed. 
+ */ + public void sendResponse(Context context, RequestItem ri, String subject, + String message) + throws IOException { + // Who granted this request? + List grantors; + try { + grantors = requestItemAuthorExtractor.getRequestItemAuthor(context, ri.getItem()); + } catch (SQLException e) { + LOG.warn("Failed to get grantor's name and address: {}", e.getMessage()); + grantors = List.of(); + } + + String grantorName; + String grantorAddress; + if (grantors.isEmpty()) { + grantorName = configurationService.getProperty("mail.admin.name"); + grantorAddress = configurationService.getProperty("mail.admin"); + } else { + RequestItemAuthor grantor = grantors.get(0); // XXX Cannot know which one + grantorName = grantor.getFullName(); + grantorAddress = grantor.getEmail(); + } + + // Build an email back to the requester. + Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), + ri.isAccept_request() ? "request_item.granted" : "request_item.rejected")); + email.addArgument(ri.getReqName()); // {0} requestor's name + email.addArgument(handleService.getCanonicalForm(ri.getItem().getHandle())); // {1} URL of the requested Item + email.addArgument(ri.getItem().getName()); // {2} title of the requested Item + email.addArgument(grantorName); // {3} name of the grantor + email.addArgument(grantorAddress); // {4} email of the grantor + email.addArgument(message); // {5} grantor's optional message + email.setSubject(subject); + email.addRecipient(ri.getReqEmail()); + // Attach bitstreams. 
+ try { + if (ri.isAccept_request()) { + if (ri.isAllfiles()) { + Item item = ri.getItem(); + List bundles = item.getBundles("ORIGINAL"); + for (Bundle bundle : bundles) { + List bitstreams = bundle.getBitstreams(); + for (Bitstream bitstream : bitstreams) { + if (!bitstream.getFormat(context).isInternal() && + requestItemService.isRestricted(context, + bitstream)) { + // #8636 Anyone receiving the email can respond to the + // request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment( + bitstreamService.retrieve(context, bitstream), + bitstream.getName(), + bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); + } + } + } + } else { + Bitstream bitstream = ri.getBitstream(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment(bitstreamService.retrieve(context, bitstream), + bitstream.getName(), + bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); + } + email.send(); + } else { + boolean sendRejectEmail = configurationService + .getBooleanProperty("request.item.reject.email", true); + // Not all sites want the "refusal" to be sent back to the requester via + // email. However, by default, the rejection email is sent back. + if (sendRejectEmail) { + email.send(); + } + } + } catch (MessagingException | IOException | SQLException | AuthorizeException e) { + LOG.warn(LogHelper.getHeader(context, + "error_mailing_requestItem", e.getMessage())); + throw new IOException("Reply not sent: " + e.getMessage()); + } + LOG.info(LogHelper.getHeader(context, + "sent_attach_requestItem", "token={}"), ri.getToken()); + } + + /** + * Send, to a repository administrator, a request to open access to a + * requested object. 
+ * + * @param context current DSpace session + * @param ri the item request that the approver is handling + * @throws IOException if the message body cannot be loaded or the message + * cannot be sent. + */ + public void requestOpenAccess(Context context, RequestItem ri) + throws IOException { + Email message = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), + "request_item.admin")); + + // Which Bitstream(s) requested? + Bitstream bitstream = ri.getBitstream(); + String bitstreamName; + if (bitstream != null) { + bitstreamName = bitstream.getName(); + } else { + bitstreamName = "all"; // TODO localize + } + + // Which Item? + Item item = ri.getItem(); + + // Fill the message's placeholders. + EPerson approver = context.getCurrentUser(); + message.addArgument(bitstreamName); // {0} bitstream name or "all" + message.addArgument(item.getHandle()); // {1} Item handle + message.addArgument(ri.getToken()); // {2} Request token + if (approver != null) { + message.addArgument(approver.getFullName()); // {3} Approver's name + message.addArgument(approver.getEmail()); // {4} Approver's address + } else { + message.addArgument("anonymous approver"); // [3] Approver's name + message.addArgument(configurationService.getProperty("mail.admin")); // [4] Approver's address + } + + // Who gets this message? + String recipient; + EPerson submitter = item.getSubmitter(); + if (submitter != null) { + recipient = submitter.getEmail(); + } else { + recipient = configurationService.getProperty("mail.helpdesk"); + } + if (null == recipient) { + recipient = configurationService.getProperty("mail.admin"); + } + message.addRecipient(recipient); + + // Send the message. 
+ try { + message.send(); + } catch (MessagingException ex) { + LOG.warn(LogHelper.getHeader(context, "error_mailing_requestItem", + ex.getMessage())); + throw new IOException("Open Access request not sent: " + ex.getMessage()); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java index 7b63d3ea8dae..dee0ed7a2351 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategy.java @@ -8,6 +8,8 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; @@ -16,36 +18,47 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.service.EPersonService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.lang.NonNull; /** - * RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request - * With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does. + * RequestItem strategy to allow DSpace support team's help desk to receive + * requestItem requests. With this enabled, the Item author/submitter doesn't + * receive the request, but the help desk instead does. * - * Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no - * specified helpdesk email. + *

Fails over to the {@link RequestItemSubmitterStrategy}, which means the + * submitter would get the request if there is no specified help desk email. * * @author Sam Ottenhoff * @author Peter Dietz */ -public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy { +public class RequestItemHelpdeskStrategy + extends RequestItemSubmitterStrategy { + static final String P_HELPDESK_OVERRIDE + = "request.item.helpdesk.override"; + static final String P_MAIL_HELPDESK = "mail.helpdesk"; + @Autowired(required = true) protected EPersonService ePersonService; + @Autowired(required = true) + protected ConfigurationService configurationService; + public RequestItemHelpdeskStrategy() { } @Override - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException { - ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); + @NonNull + public List getRequestItemAuthor(Context context, Item item) + throws SQLException { boolean helpdeskOverridesSubmitter = configurationService .getBooleanProperty("request.item.helpdesk.override", false); String helpDeskEmail = configurationService.getProperty("mail.helpdesk"); if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) { - return getHelpDeskPerson(context, helpDeskEmail); + List authors = new ArrayList<>(1); + authors.add(getHelpDeskPerson(context, helpDeskEmail)); + return authors; } else { //Fallback to default logic (author of Item) if helpdesk isn't fully enabled or setup return super.getRequestItemAuthor(context, item); @@ -53,16 +66,18 @@ public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws } /** - * Return a RequestItemAuthor object for the specified helpdesk email address. - * It makes an attempt to find if there is a matching eperson for the helpdesk address, to use the name, - * Otherwise it falls back to a helpdeskname key in the Messages.props. 
+ * Return a RequestItemAuthor object for the specified help desk email address. + * It makes an attempt to find if there is a matching {@link EPerson} for + * the help desk address, to use its name. Otherwise it falls back to the + * {@code helpdeskname} key in {@code Messages.properties}. * * @param context context * @param helpDeskEmail email * @return RequestItemAuthor * @throws SQLException if database error */ - public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException { + public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) + throws SQLException { context.turnOffAuthorisationSystem(); EPerson helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail); context.restoreAuthSystemState(); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java index 9838e586975e..4372ab9b09b0 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemMetadataStrategy.java @@ -8,6 +8,8 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import org.apache.commons.lang3.StringUtils; @@ -16,12 +18,13 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.core.I18nUtil; -import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.lang.NonNull; /** * Try to look to an item metadata for the corresponding author name and email. - * Failover to the RequestItemSubmitterStrategy + * Failover to the RequestItemSubmitterStrategy. 
* * @author Andrea Bollini */ @@ -30,6 +33,9 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy { protected String emailMetadata; protected String fullNameMetadata; + @Autowired(required = true) + protected ConfigurationService configurationService; + @Autowired(required = true) protected ItemService itemService; @@ -37,59 +43,72 @@ public RequestItemMetadataStrategy() { } @Override - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) + @NonNull + public List getRequestItemAuthor(Context context, Item item) throws SQLException { - RequestItemAuthor author = null; + List authors; if (emailMetadata != null) { List vals = itemService.getMetadataByMetadataString(item, emailMetadata); - if (vals.size() > 0) { - String email = vals.iterator().next().getValue(); - String fullname = null; - if (fullNameMetadata != null) { - List nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata); - if (nameVals.size() > 0) { - fullname = nameVals.iterator().next().getValue(); + List nameVals; + if (null != fullNameMetadata) { + nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata); + } else { + nameVals = Collections.EMPTY_LIST; + } + boolean useNames = vals.size() == nameVals.size(); + if (!vals.isEmpty()) { + authors = new ArrayList<>(vals.size()); + for (int authorIndex = 0; authorIndex < vals.size(); authorIndex++) { + String email = vals.get(authorIndex).getValue(); + String fullname = null; + if (useNames) { + fullname = nameVals.get(authorIndex).getValue(); } + + if (StringUtils.isBlank(fullname)) { + fullname = I18nUtil.getMessage( + "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", + context); + } + RequestItemAuthor author = new RequestItemAuthor( + fullname, email); + authors.add(author); } - if (StringUtils.isBlank(fullname)) { - fullname = I18nUtil - .getMessage( - "org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed", - context); - } - author = new 
RequestItemAuthor(fullname, email); - return author; + return authors; + } else { + return Collections.EMPTY_LIST; } } else { // Uses the basic strategy to look for the original submitter - author = super.getRequestItemAuthor(context, item); - // Is the author or his email null, so get the help desk or admin name and email - if (null == author || null == author.getEmail()) { - String email = null; - String name = null; + authors = super.getRequestItemAuthor(context, item); + + // Remove from the list authors that do not have email addresses. + for (RequestItemAuthor author : authors) { + if (null == author.getEmail()) { + authors.remove(author); + } + } + + if (authors.isEmpty()) { // No author email addresses! Fall back //First get help desk name and email - email = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.helpdesk"); - name = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.helpdesk.name"); + String email = configurationService.getProperty("mail.helpdesk"); + String name = configurationService.getProperty("mail.helpdesk.name"); // If help desk mail is null get the mail and name of admin if (email == null) { - email = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.admin"); - name = DSpaceServicesFactory.getInstance() - .getConfigurationService().getProperty("mail.admin.name"); + email = configurationService.getProperty("mail.admin"); + name = configurationService.getProperty("mail.admin.name"); } - author = new RequestItemAuthor(name, email); + authors.add(new RequestItemAuthor(name, email)); } + return authors; } - return author; } - public void setEmailMetadata(String emailMetadata) { + public void setEmailMetadata(@NonNull String emailMetadata) { this.emailMetadata = emailMetadata; } - public void setFullNameMetadata(String fullNameMetadata) { + public void setFullNameMetadata(@NonNull String fullNameMetadata) { this.fullNameMetadata = 
fullNameMetadata; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemServiceImpl.java index 7f3d086c0305..b915cfedd346 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemServiceImpl.java @@ -9,37 +9,54 @@ import java.sql.SQLException; import java.util.Date; +import java.util.Iterator; +import java.util.List; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.requestitem.dao.RequestItemDAO; import org.dspace.app.requestitem.service.RequestItemService; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; import org.dspace.content.Item; +import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.core.LogHelper; import org.dspace.core.Utils; import org.springframework.beans.factory.annotation.Autowired; /** * Service implementation for the RequestItem object. - * This class is responsible for all business logic calls for the RequestItem object and is autowired by spring. + * This class is responsible for all business logic calls for the RequestItem + * object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com */ public class RequestItemServiceImpl implements RequestItemService { - private final Logger log = org.apache.logging.log4j.LogManager.getLogger(RequestItemServiceImpl.class); + private final Logger log = LogManager.getLogger(); @Autowired(required = true) protected RequestItemDAO requestItemDAO; + @Autowired(required = true) + protected AuthorizeService authorizeService; + + @Autowired(required = true) + protected ResourcePolicyService resourcePolicyService; + protected RequestItemServiceImpl() { } @Override - public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, - String reqName, String reqMessage) throws SQLException { + public String createRequest(Context context, Bitstream bitstream, Item item, + boolean allFiles, String reqEmail, String reqName, String reqMessage) + throws SQLException { RequestItem requestItem = requestItemDAO.create(context, new RequestItem()); requestItem.setToken(Utils.generateHexKey()); @@ -53,13 +70,17 @@ public String createRequest(Context context, Bitstream bitstream, Item item, boo requestItemDAO.save(context, requestItem); - if (log.isDebugEnabled()) { - log.debug("Created requestitem_token " + requestItem.getID() - + " with token " + requestItem.getToken() + "\""); - } + log.debug("Created RequestItem with ID {} and token {}", + requestItem::getID, requestItem::getToken); return requestItem.getToken(); } + @Override + public List findAll(Context context) + throws SQLException { + return requestItemDAO.findAll(context, RequestItem.class); + } + @Override public RequestItem findByToken(Context context, String token) { try { @@ -70,6 +91,11 @@ public RequestItem findByToken(Context context, String token) { } } + @Override + public Iterator findByItem(Context context, Item item) throws SQLException { + return requestItemDAO.findByItem(context, item); + } + @Override public void update(Context context, RequestItem requestItem) { try { @@ 
-78,4 +104,28 @@ public void update(Context context, RequestItem requestItem) { log.error(e.getMessage()); } } + + @Override + public void delete(Context context, RequestItem requestItem) { + log.debug(LogHelper.getHeader(context, "delete_itemrequest", "request_id={}"), + requestItem.getID()); + try { + requestItemDAO.delete(context, requestItem); + } catch (SQLException e) { + log.error(e.getMessage()); + } + } + + @Override + public boolean isRestricted(Context context, DSpaceObject o) + throws SQLException { + List policies = authorizeService + .getPoliciesActionFilter(context, o, Constants.READ); + for (ResourcePolicy rp : policies) { + if (resourcePolicyService.isDateValid(rp)) { + return false; + } + } + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java index 2708c24ba9fa..6cfeee442600 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemSubmitterStrategy.java @@ -8,10 +8,13 @@ package org.dspace.app.requestitem; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.springframework.lang.NonNull; /** * Basic strategy that looks to the original submitter. @@ -19,26 +22,27 @@ * @author Andrea Bollini */ public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor { - public RequestItemSubmitterStrategy() { } /** - * Returns the submitter of an Item as RequestItemAuthor or null if the - * Submitter is deleted. + * Returns the submitter of an Item as RequestItemAuthor or an empty List if + * the Submitter is deleted. 
* - * @return The submitter of the item or null if the submitter is deleted + * @return The submitter of the item or empty List if the submitter is deleted * @throws SQLException if database error */ @Override - public RequestItemAuthor getRequestItemAuthor(Context context, Item item) + @NonNull + public List getRequestItemAuthor(Context context, Item item) throws SQLException { EPerson submitter = item.getSubmitter(); - RequestItemAuthor author = null; + List authors = new ArrayList<>(1); if (null != submitter) { - author = new RequestItemAuthor( - submitter.getFullName(), submitter.getEmail()); + RequestItemAuthor author = new RequestItemAuthor( + submitter.getFullName(), submitter.getEmail()); + authors.add(author); } - return author; + return authors; } } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/RequestItemDAO.java b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/RequestItemDAO.java index 74caa16d0e86..b36ae58e0ca1 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/RequestItemDAO.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/RequestItemDAO.java @@ -8,20 +8,32 @@ package org.dspace.app.requestitem.dao; import java.sql.SQLException; +import java.util.Iterator; import org.dspace.app.requestitem.RequestItem; +import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.core.GenericDAO; /** * Database Access Object interface class for the RequestItem object. - * The implementation of this class is responsible for all database calls for the RequestItem object and is autowired - * by spring - * This class should only be accessed from a single service and should never be exposed outside of the API + * The implementation of this class is responsible for all database calls for + * the RequestItem object and is autowired by Spring. + * This class should only be accessed from a single service and should never be + * exposed outside of the API. 
* * @author kevinvandevelde at atmire.com */ public interface RequestItemDAO extends GenericDAO { - + /** + * Fetch a request named by its unique token (passed in emails). + * + * @param context the current DSpace context. + * @param token uniquely identifies the request. + * @return the found request (or {@code null}?) + * @throws SQLException passed through. + */ public RequestItem findByToken(Context context, String token) throws SQLException; + + public Iterator findByItem(Context context, Item item) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java index 351f40ae139a..008174ded88c 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/dao/impl/RequestItemDAOImpl.java @@ -8,6 +8,8 @@ package org.dspace.app.requestitem.dao.impl; import java.sql.SQLException; +import java.util.Iterator; +import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Root; @@ -15,12 +17,13 @@ import org.dspace.app.requestitem.RequestItem; import org.dspace.app.requestitem.RequestItem_; import org.dspace.app.requestitem.dao.RequestItemDAO; +import org.dspace.content.Item; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; /** * Hibernate implementation of the Database Access Object interface class for the RequestItem object. - * This class is responsible for all database calls for the RequestItem object and is autowired by spring + * This class is responsible for all database calls for the RequestItem object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -37,8 +40,12 @@ public RequestItem findByToken(Context context, String token) throws SQLExceptio Root requestItemRoot = criteriaQuery.from(RequestItem.class); criteriaQuery.select(requestItemRoot); criteriaQuery.where(criteriaBuilder.equal(requestItemRoot.get(RequestItem_.token), token)); - return uniqueResult(context, criteriaQuery, false, RequestItem.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, RequestItem.class); + } + @Override + public Iterator findByItem(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM RequestItem WHERE item_id= :uuid"); + query.setParameter("uuid", item.getID()); + return iterate(query); } - - } diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java new file mode 100644 index 000000000000..fa7c15b23060 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/package-info.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +/** + * Feature for conveying a request that materials forbidden to the requester + * by resource policy be made available by other means. The request will be + * e-mailed to a responsible party for consideration and action. Find details + * in the user documentation under the rubric "Request a Copy". + * + *

Mailing is handled by {@link RequestItemEmailNotifier}. Responsible + * parties are represented by {@link RequestItemAuthor} + * + *

This package includes several "strategy" classes which discover + * responsible parties in various ways. See + * {@link RequestItemSubmitterStrategy} and the classes which extend it, and + * others which implement {@link RequestItemAuthorExtractor}. A strategy class + * must be configured and identified as {@link requestItemAuthorExtractor} + * (note capitalization) for injection into code which requires Request + * a Copy services. + */ +package org.dspace.app.requestitem; diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/service/RequestItemService.java b/dspace-api/src/main/java/org/dspace/app/requestitem/service/RequestItemService.java index 43e1a0201af2..efac3b18bc7c 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/service/RequestItemService.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/service/RequestItemService.java @@ -8,16 +8,19 @@ package org.dspace.app.requestitem.service; import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; import org.dspace.app.requestitem.RequestItem; import org.dspace.content.Bitstream; +import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.core.Context; /** * Service interface class for the RequestItem object. - * The implementation of this class is responsible for all business logic calls for the RequestItem object and is - * autowired by spring + * The implementation of this class is responsible for all business logic calls + * for the RequestItem object and is autowired by Spring. 
* * @author kevinvandevelde at atmire.com */ @@ -37,12 +40,37 @@ public interface RequestItemService { * @return the token of the request item * @throws SQLException if database error */ - public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, - String reqName, String reqMessage) + public String createRequest(Context context, Bitstream bitstream, Item item, + boolean allFiles, String reqEmail, String reqName, String reqMessage) throws SQLException; + /** + * Fetch all item requests. + * + * @param context current DSpace session. + * @return all item requests. + * @throws java.sql.SQLException passed through. + */ + public List findAll(Context context) + throws SQLException; + + /** + * Retrieve a request by its token. + * + * @param context current DSpace session. + * @param token the token identifying the request. + * @return the matching request, or null if not found. + */ public RequestItem findByToken(Context context, String token); + /** + * Retrieve a request based on the item. + * @param context current DSpace session. + * @param item the item to find requests for. + * @return the matching requests, or null if not found. + */ + public Iterator findByItem(Context context, Item item) throws SQLException; + /** * Save updates to the record. Only accept_request, and decision_date are set-able. * @@ -51,5 +79,21 @@ public String createRequest(Context context, Bitstream bitstream, Item item, boo */ public void update(Context context, RequestItem requestItem); + /** + * Remove the record from the database. + * + * @param context current DSpace context. + * @param request record to be removed. + */ + public void delete(Context context, RequestItem request); + /** + * Is there at least one valid READ resource policy for this object? + * @param context current DSpace session. + * @param o the object. + * @return true if a READ policy applies. + * @throws SQLException passed through. 
+ */ + public boolean isRestricted(Context context, DSpaceObject o) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java index 596bd6364384..ead725e842c4 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java @@ -31,6 +31,7 @@ import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.Cacheable; /** * SHERPAService is responsible for making the HTTP call to the SHERPA v2 API @@ -43,6 +44,7 @@ * @author Kim Shepherd */ public class SHERPAService { + private CloseableHttpClient client = null; private int maxNumberOfTries; @@ -73,6 +75,7 @@ public SHERPAService() { /** * Complete initialization of the Bean. */ + @SuppressWarnings("unused") @PostConstruct private void init() { // Get endoint and API key from configuration @@ -90,6 +93,7 @@ private void init() { * @param query ISSN string to pass in an "issn equals" API query * @return SHERPAResponse containing an error or journal policies */ + @Cacheable(key = "#query", cacheNames = "sherpa.searchByJournalISSN") public SHERPAResponse searchByJournalISSN(String query) { return performRequest("publication", "issn", "equals", query, 0, 1); } @@ -412,4 +416,5 @@ public void setSleepBetweenTimeouts(long sleepBetweenTimeouts) { public void setTimeout(int timeout) { this.timeout = timeout; } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java new file mode 100644 index 000000000000..94ecfb5e213d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java @@ -0,0 +1,71 @@ 
+/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.cache; + +import java.util.Objects; +import java.util.Set; + +import org.dspace.app.sherpa.submit.SHERPASubmitService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.cache.CacheManager; + +/** + * This service is responsible to deal with the SherpaService cache. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SherpaCacheEvictService { + + // The cache that is managed by this service. + static final String CACHE_NAME = "sherpa.searchByJournalISSN"; + + private CacheManager cacheManager; + + private SHERPASubmitService sherpaSubmitService; + + /** + * Remove immediately from the cache all the response that are related to a specific item + * extracting the ISSNs from the item + * + * @param context The DSpace context + * @param item an Item + */ + public void evictCacheValues(Context context, Item item) { + Set ISSNs = sherpaSubmitService.getISSNs(context, item); + for (String issn : ISSNs) { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(issn); + } + } + + /** + * Invalidate immediately the Sherpa cache + */ + public void evictAllCacheValues() { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate(); + } + + /** + * Set the reference to the cacheManager + * + * @param cacheManager + */ + public void setCacheManager(CacheManager cacheManager) { + this.cacheManager = cacheManager; + } + + /** + * Set the reference to the SherpaSubmitService + * + * @param sherpaSubmitService + */ + public void setSherpaSubmitService(SHERPASubmitService sherpaSubmitService) { + this.sherpaSubmitService = sherpaSubmitService; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java new file mode 100644 index 000000000000..e84fb7775ae2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.cache; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; + +/** + * This is an EHCache listener responsible for logging sherpa cache events. It is + * bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a + * dedicated Logger for each cache as the CacheEvent doesn't include details + * about where the event occurs. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + * + */ +public class SherpaCacheLogger implements CacheEventListener { + + private static final Logger log = LogManager.getLogger(SherpaCacheLogger.class); + + @Override + public void onEvent(CacheEvent cacheEvent) { + log.debug("Sherpa Cache Event Type: {} | Key: {} ", + cacheEvent.getType(), cacheEvent.getKey()); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java index fdf5b3c16730..b795c8a2b2d2 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java @@ -9,7 +9,6 @@ import java.util.Iterator; import java.util.LinkedHashSet; -import java.util.LinkedList; import java.util.List; import java.util.Set; @@
-18,7 +17,7 @@ import org.dspace.app.sherpa.v2.SHERPAResponse; import org.dspace.content.Item; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; /** * SHERPASubmitService is @@ -63,19 +62,19 @@ public void setSherpaService(SHERPAService sherpaService) { * issnItemExtractor(s) in the SHERPA spring configuration. * The ISSNs are not validated with a regular expression or other rules - any values * extracted will be included in API queries. + * Return the first not empty response from Sherpa * @see "dspace-dspace-addon-sherpa-configuration-services.xml" * @param context DSpace context * @param item DSpace item containing ISSNs to be checked * @return SHERPA v2 API response (policy data) */ - public List searchRelatedJournals(Context context, Item item) { + public SHERPAResponse searchRelatedJournals(Context context, Item item) { Set issns = getISSNs(context, item); if (issns == null || issns.size() == 0) { return null; } else { // SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead Iterator issnIterator = issns.iterator(); - List responses = new LinkedList<>(); while (issnIterator.hasNext()) { String issn = issnIterator.next(); SHERPAResponse response = sherpaService.searchByJournalISSN(issn); @@ -83,14 +82,13 @@ public List searchRelatedJournals(Context context, Item item) { // Continue with loop log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn + ": " + response.getMessage()); + return response; + } else if (!response.getJournals().isEmpty()) { + // return this response, if it is not empty + return response; } - // Store this response, even if it has an error (useful for UI reporting) - responses.add(response); } - if (responses.isEmpty()) { - responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed")); - } - return responses; + return new SHERPAResponse(); } } @@ -115,7 +113,7 @@ public SHERPAResponse searchRelatedJournalsByISSN(String issn) { public Set 
getISSNs(Context context, Item item) { Set issns = new LinkedHashSet(); if (configuration.getIssnItemExtractors() == null) { - log.warn(LogManager.getHeader(context, "searchRelatedJournals", + log.warn(LogHelper.getHeader(context, "searchRelatedJournals", "no issnItemExtractors defined")); return null; } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java new file mode 100644 index 000000000000..c6a0bb79428f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.v2; + +import java.io.Serializable; + +/** + * Model class for the Embargo of SHERPAv2 API (JSON) + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class SHERPAEmbargo implements Serializable { + + private static final long serialVersionUID = 6140668058547523656L; + + private int amount; + private String units; + + public SHERPAEmbargo(int amount, String units) { + this.amount = amount; + this.units = units; + } + + public int getAmount() { + return amount; + } + + public void setAmount(int amount) { + this.amount = amount; + } + + public String getUnits() { + return units; + } + + public void setUnits(String units) { + this.units = units; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java index b668dbd92715..8728eb1a798d 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import 
java.io.Serializable; import java.util.List; /** @@ -21,7 +22,7 @@ * * @author Kim Shepherd */ -public class SHERPAJournal { +public class SHERPAJournal implements Serializable { private List titles; private String url; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java index ec45a29ce7c5..85d5f8960aed 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java @@ -7,25 +7,31 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; /** * Plain java representation of a SHERPA Permitted Version object, based on SHERPA API v2 responses. * * In a SHERPA search for journal deposit policies, this data is contained within a publisher policy. - * Each permitted version is for a particular article version (eg. submitted, accepted, published) and contains + * Each permitted version is for a particular article version (e.g. submitted, accepted, published) and contains: * - * A list of general conditions / terms for deposit of this version of work - * A list of allowed locations (eg. institutional repository, personal homepage, non-commercial repository) - * A list of prerequisite conditions for deposit (eg. attribution, linking to published version) - * A list of required licences for the deposited work (eg. CC-BY-NC) - * Embargo requirements, if any + *

    + *
  • A list of general conditions / terms for deposit of this version of work
  • + *
  • A list of allowed locations (e.g. institutional repository, personal homepage, non-commercial repository)
  • + *
  • A list of prerequisite conditions for deposit (e.g. attribution, linking to published version)
  • + *
  • A list of required licenses for the deposited work (e.g. CC-BY-NC)
  • + *
  • Embargo requirements, if any
  • + *
* - * This class also has some helper data for labels, which can be used with i18n when displaying policy information + * This class also has some helper data for labels, which can be used with i18n + * when displaying policy information. * * @see SHERPAPublisherPolicy */ -public class SHERPAPermittedVersion { +public class SHERPAPermittedVersion implements Serializable { + + private static final long serialVersionUID = 4992181606327727442L; // Version (submitted, accepted, published) private String articleVersion; @@ -44,11 +50,6 @@ public class SHERPAPermittedVersion { // Embargo private SHERPAEmbargo embargo; - protected class SHERPAEmbargo { - String units; - int amount; - } - public String getArticleVersion() { return articleVersion; } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java index 0097ec2fb3bc..ee1491ed8b1a 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java @@ -7,6 +7,8 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; + /** * Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses. 
* @@ -18,7 +20,7 @@ * @see SHERPAJournal * @see SHERPAPublisherResponse */ -public class SHERPAPublisher { +public class SHERPAPublisher implements Serializable { private String name = null; private String relationshipType; private String country; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java index 2a04564e28f9..3e76c5cd37ce 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; import java.util.Map; @@ -22,7 +23,7 @@ * @see SHERPAJournal * @see SHERPAPermittedVersion */ -public class SHERPAPublisherPolicy { +public class SHERPAPublisherPolicy implements Serializable { private int id; private boolean openAccessPermitted; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherResponse.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherResponse.java index f109b2e67776..ac71c6e84418 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherResponse.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherResponse.java @@ -10,7 +10,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import java.util.LinkedList; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.List; import org.apache.logging.log4j.LogManager; @@ -74,7 +75,7 @@ public SHERPAPublisherResponse(InputStream input, SHERPAFormat format) throws IO * @param jsonData - the JSON input stream from the API result response body */ private void parseJSON(InputStream jsonData) throws IOException { - InputStreamReader streamReader = new InputStreamReader(jsonData); + InputStreamReader streamReader = new 
InputStreamReader(jsonData, StandardCharsets.UTF_8); JSONTokener jsonTokener = new JSONTokener(streamReader); JSONObject httpResponse; try { @@ -86,7 +87,7 @@ private void parseJSON(InputStream jsonData) throws IOException { // parsing the full journal / policy responses if (items.length() > 0) { metadata = new SHERPASystemMetadata(); - this.publishers = new LinkedList<>(); + this.publishers = new ArrayList<>(); // Iterate search result items for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) { SHERPAPublisher sherpaPublisher = new SHERPAPublisher(); diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java index 3134ad013c47..83dd1e0d3c3d 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java @@ -10,12 +10,15 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.LinkedList; +import java.util.Date; import java.util.List; import java.util.Map; import java.util.TreeMap; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONArray; @@ -33,7 +36,10 @@ * @author Kim Shepherd * */ -public class SHERPAResponse { +public class SHERPAResponse implements Serializable { + + private static final long serialVersionUID = 2732963970169240597L; + // Is this response to be treated as an error? 
private boolean error; @@ -52,6 +58,9 @@ public class SHERPAResponse { // SHERPA URI (the human page version of this API response) private String uri; + @JsonIgnore + private Date retrievalTime = new Date(); + // Format enum - currently only JSON is supported public enum SHERPAFormat { JSON, XML @@ -71,6 +80,11 @@ public SHERPAResponse(InputStream input, SHERPAFormat format) throws IOException } } + /** + * Create an empty SHERPAResponse representation + */ + public SHERPAResponse() {} + /** * Parse the SHERPA v2 API JSON and construct Romeo policy data for display * This method does not return a value, but rather populates the metadata and journals objects @@ -78,7 +92,7 @@ public SHERPAResponse(InputStream input, SHERPAFormat format) throws IOException * @param jsonData - the JSON input stream from the API result response body */ private void parseJSON(InputStream jsonData) throws IOException { - InputStreamReader streamReader = new InputStreamReader(jsonData); + InputStreamReader streamReader = new InputStreamReader(jsonData, StandardCharsets.UTF_8); JSONTokener jsonTokener = new JSONTokener(streamReader); JSONObject httpResponse; try { @@ -90,10 +104,10 @@ private void parseJSON(InputStream jsonData) throws IOException { // - however, we only ever want one result since we're passing an "equals ISSN" query if (items.length() > 0) { metadata = new SHERPASystemMetadata(); - this.journals = new LinkedList<>(); + this.journals = new ArrayList<>(); // Iterate search result items for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) { - List sherpaPublishers = new LinkedList<>(); + List sherpaPublishers = new ArrayList<>(); List policies = new ArrayList<>(); SHERPAPublisher sherpaPublisher = new SHERPAPublisher(); SHERPAJournal sherpaJournal = new SHERPAJournal(); @@ -289,7 +303,7 @@ private SHERPAJournal parseJournal(JSONObject item, String publisherName) { // Is the item in DOAJ? 
if (item.has("listed_in_doaj")) { - sherpaJournal.setInDOAJ(("yes".equals(item.getString("listed_in_doaj")))); + sherpaJournal.setInDOAJ("yes".equals(item.getString("listed_in_doaj"))); } return sherpaJournal; @@ -403,7 +417,6 @@ private SHERPAPermittedVersion parsePermittedVersion(JSONObject permitted, int i // published = pdfversion // These strings can be used to construct i18n messages. String articleVersion = "unknown"; - String versionLabel = "Unknown"; // Each 'permitted OA' can actually refer to multiple versions if (permitted.has("article_version")) { @@ -480,6 +493,12 @@ private SHERPAPermittedVersion parsePermittedVersion(JSONObject permitted, int i } permittedVersion.setLicenses(sherpaLicenses); + if (permitted.has("embargo")) { + JSONObject embargo = permitted.getJSONObject("embargo"); + SHERPAEmbargo SHERPAEmbargo = new SHERPAEmbargo(embargo.getInt("amount"), embargo.getString("units")); + permittedVersion.setEmbargo(SHERPAEmbargo); + } + return permittedVersion; } @@ -543,4 +562,8 @@ public List getJournals() { public SHERPASystemMetadata getMetadata() { return metadata; } + + public Date getRetrievalTime() { + return retrievalTime; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java index 2a807940bb61..65b07c181131 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java @@ -7,6 +7,8 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; + /** * Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses. 
* @@ -18,7 +20,7 @@ * * @author Kim Shepherd */ -public class SHERPASystemMetadata { +public class SHERPASystemMetadata implements Serializable { private int id; private String uri; diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java index 5057477e3171..90962d12aa75 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java @@ -7,18 +7,10 @@ */ package org.dspace.app.sitemap; -import java.io.BufferedReader; import java.io.File; import java.io.IOException; -import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLEncoder; import java.sql.SQLException; import java.util.Date; -import java.util.Iterator; import java.util.List; import org.apache.commons.cli.CommandLine; @@ -29,20 +21,17 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; import 
org.dspace.discovery.SearchUtils; @@ -68,6 +57,7 @@ public class GenerateSitemaps { private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private static final SearchService searchService = SearchUtils.getSearchService(); + private static final int PAGE_SIZE = 100; /** * Default constructor @@ -87,11 +77,6 @@ public static void main(String[] args) throws Exception { "do not generate sitemaps.org protocol sitemap"); options.addOption("b", "no_htmlmap", false, "do not generate a basic HTML sitemap"); - options.addOption("a", "ping_all", false, - "ping configured search engines"); - options - .addOption("p", "ping", true, - "ping specified search engine URL"); options .addOption("d", "delete", false, "delete sitemaps dir and its contents"); @@ -116,14 +101,13 @@ public static void main(String[] args) throws Exception { } /* - * Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage + * Sanity check -- if no sitemap generation or deletion, print usage */ if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b') && line.hasOption('s') && !line.hasOption('g') - && !line.hasOption('m') && !line.hasOption('y') - && !line.hasOption('p')) { + && !line.hasOption('m') && !line.hasOption('y')) { System.err - .println("Nothing to do (no sitemap to generate, no search engines to ping)"); + .println("Nothing to do (no sitemap to generate)"); hf.printHelp(usage, options); System.exit(1); } @@ -137,20 +121,6 @@ public static void main(String[] args) throws Exception { deleteSitemaps(); } - if (line.hasOption('a')) { - pingConfiguredSearchEngines(); - } - - if (line.hasOption('p')) { - try { - pingSearchEngine(line.getOptionValue('p')); - } catch (MalformedURLException me) { - System.err - .println("Bad search engine URL (include all except sitemap URL)"); - System.exit(1); - } - } - System.exit(0); } @@ -189,7 +159,10 @@ public static void deleteSitemaps() throws 
IOException { */ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException { String uiURLStem = configurationService.getProperty("dspace.ui.url"); - String sitemapStem = uiURLStem + "/sitemap"; + if (!uiURLStem.endsWith("/")) { + uiURLStem = uiURLStem + '/'; + } + String sitemapStem = uiURLStem + "sitemap"; File outputDir = new File(configurationService.getProperty("sitemap.dir")); if (!outputDir.exists() && !outputDir.mkdir()) { @@ -208,171 +181,113 @@ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) } Context c = new Context(Context.Mode.READ_ONLY); + int offset = 0; + long commsCount = 0; + long collsCount = 0; + long itemsCount = 0; - List comms = communityService.findAll(c); - - for (Community comm : comms) { - String url = uiURLStem + "/communities/" + comm.getID(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - c.uncacheEntity(comm); - } - - List colls = collectionService.findAll(c); - - for (Collection coll : colls) { - String url = uiURLStem + "/collections/" + coll.getID(); - - if (makeHTMLMap) { - html.addURL(url, null); - } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, null); - } - - c.uncacheEntity(coll); - } - - Iterator allItems = itemService.findAll(c); - int itemCount = 0; - - while (allItems.hasNext()) { - Item i = allItems.next(); - - DiscoverQuery entityQuery = new DiscoverQuery(); - entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*"); - entityQuery.addSearchField("entityType"); - - try { - DiscoverResult discoverResult = searchService.search(c, entityQuery); - - String url; - if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects()) - && CollectionUtils.isNotEmpty(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType")) - && 
StringUtils.isNotBlank(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0)) - ) { - url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument( - discoverResult.getIndexableObjects().get(0)) - .get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID(); - } else { - url = uiURLStem + "/items/" + i.getID(); + try { + DiscoverQuery discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Community"); + do { + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + commsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url = uiURLStem + "communities/" + doc.getID(); + c.uncacheEntity(doc.getIndexedObject()); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } - Date lastMod = i.getLastModified(); - - if (makeHTMLMap) { - html.addURL(url, lastMod); + offset += PAGE_SIZE; + } while (offset < commsCount); + + offset = 0; + discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Collection"); + do { + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + collsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url = uiURLStem + "collections/" + doc.getID(); + c.uncacheEntity(doc.getIndexedObject()); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } - if (makeSitemapOrg) { - sitemapsOrg.addURL(url, lastMod); + offset += PAGE_SIZE; + } while (offset < collsCount); + + offset = 0; + 
discoveryQuery = new DiscoverQuery(); + discoveryQuery.setMaxResults(PAGE_SIZE); + discoveryQuery.setQuery("search.resourcetype:Item"); + discoveryQuery.addSearchField("search.entitytype"); + do { + + discoveryQuery.setStart(offset); + DiscoverResult discoverResult = searchService.search(c, discoveryQuery); + List docs = discoverResult.getIndexableObjects(); + itemsCount = discoverResult.getTotalSearchResults(); + + for (IndexableObject doc : docs) { + String url; + List entityTypeFieldValues = discoverResult.getSearchDocument(doc).get(0) + .getSearchFieldValues("search.entitytype"); + if (CollectionUtils.isNotEmpty(entityTypeFieldValues)) { + url = uiURLStem + "entities/" + StringUtils.lowerCase(entityTypeFieldValues.get(0)) + "/" + + doc.getID(); + } else { + url = uiURLStem + "items/" + doc.getID(); + } + Date lastMod = doc.getLastModified(); + c.uncacheEntity(doc.getIndexedObject()); + + if (makeHTMLMap) { + html.addURL(url, null); + } + if (makeSitemapOrg) { + sitemapsOrg.addURL(url, null); + } } - } catch (SearchServiceException e) { - log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage()); - } - - c.uncacheEntity(i); - - itemCount++; - } - - if (makeHTMLMap) { - int files = html.finish(); - log.info(LogManager.getHeader(c, "write_sitemap", - "type=html,num_files=" + files + ",communities=" - + comms.size() + ",collections=" + colls.size() - + ",items=" + itemCount)); - } - - if (makeSitemapOrg) { - int files = sitemapsOrg.finish(); - log.info(LogManager.getHeader(c, "write_sitemap", - "type=html,num_files=" + files + ",communities=" - + comms.size() + ",collections=" + colls.size() - + ",items=" + itemCount)); - } - - c.abort(); - } - - /** - * Ping all search engines configured in {@code dspace.cfg}. 
- * - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingConfiguredSearchEngines() - throws UnsupportedEncodingException { - String[] engineURLs = configurationService - .getArrayProperty("sitemap.engineurls"); - - if (ArrayUtils.isEmpty(engineURLs)) { - log.warn("No search engine URLs configured to ping"); - return; - } - - for (int i = 0; i < engineURLs.length; i++) { - try { - pingSearchEngine(engineURLs[i]); - } catch (MalformedURLException me) { - log.warn("Bad search engine URL in configuration: " - + engineURLs[i]); - } - } - } - - /** - * Ping the given search engine. - * - * @param engineURL Search engine URL minus protocol etc, e.g. - * {@code www.google.com} - * @throws MalformedURLException if the passed in URL is malformed - * @throws UnsupportedEncodingException theoretically should never happen - */ - public static void pingSearchEngine(String engineURL) - throws MalformedURLException, UnsupportedEncodingException { - // Set up HTTP proxy - if ((StringUtils.isNotBlank(configurationService.getProperty("http.proxy.host"))) - && (StringUtils.isNotBlank(configurationService.getProperty("http.proxy.port")))) { - System.setProperty("proxySet", "true"); - System.setProperty("proxyHost", configurationService - .getProperty("http.proxy.host")); - System.getProperty("proxyPort", configurationService - .getProperty("http.proxy.port")); - } + offset += PAGE_SIZE; + } while (offset < itemsCount); - String sitemapURL = configurationService.getProperty("dspace.ui.url") - + "/sitemap"; - - URL url = new URL(engineURL + URLEncoder.encode(sitemapURL, "UTF-8")); - - try { - HttpURLConnection connection = (HttpURLConnection) url - .openConnection(); - - BufferedReader in = new BufferedReader(new InputStreamReader( - connection.getInputStream())); - - String inputLine; - StringBuffer resp = new StringBuffer(); - while ((inputLine = in.readLine()) != null) { - resp.append(inputLine).append("\n"); + if (makeHTMLMap) { + 
int files = html.finish(); + log.info(LogHelper.getHeader(c, "write_sitemap", + "type=html,num_files=" + files + ",communities=" + + commsCount + ",collections=" + collsCount + + ",items=" + itemsCount)); } - in.close(); - if (connection.getResponseCode() == 200) { - log.info("Pinged " + url.toString() + " successfully"); - } else { - log.warn("Error response pinging " + url.toString() + ":\n" - + resp); + if (makeSitemapOrg) { + int files = sitemapsOrg.finish(); + log.info(LogHelper.getHeader(c, "write_sitemap", + "type=html,num_files=" + files + ",communities=" + + commsCount + ",collections=" + collsCount + + ",items=" + itemsCount)); } - } catch (IOException e) { - log.warn("Error pinging " + url.toString(), e); + } catch (SearchServiceException e) { + throw new RuntimeException(e); + } finally { + c.abort(); } } } diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java b/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java index 3ec4ca823966..53f402d33157 100644 --- a/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java +++ b/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java @@ -86,7 +86,7 @@ public String getTrailingBoilerPlate() { @Override public String getURLText(String url, Date lastMod) { - StringBuffer urlText = new StringBuffer(); + StringBuilder urlText = new StringBuilder(); urlText.append("").append(url).append(""); if (lastMod != null) { diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCli.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCli.java new file mode 100644 index 000000000000..f901c9ca569e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCli.java @@ -0,0 +1,175 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and 
available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.solrdatabaseresync; + +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Calendar; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.cli.ParseException; +import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.dspace.core.Context; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.IndexingService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.util.SolrUtils; +import org.dspace.utils.DSpace; + +/** + * {@link DSpaceRunnable} implementation to update solr items with "predb" status to either: + * - Delete them from solr if they're not present in the database + * - Remove their status if they're present in the database + */ +public class SolrDatabaseResyncCli extends DSpaceRunnable { + /* Log4j logger */ + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class); + + public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex"; + + private IndexingService indexingService; + private SolrSearchCore solrSearchCore; + private 
IndexObjectFactoryFactory indexObjectServiceFactory; + private ConfigurationService configurationService; + + private int timeUntilReindex = 0; + private String maxTime; + + @Override + public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class); + } + + public static void runScheduled() throws Exception { + SolrDatabaseResyncCli script = new SolrDatabaseResyncCli(); + script.setup(); + script.internalRun(); + } + + @Override + public void setup() throws ParseException { + indexingService = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), IndexingService.class); + solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(SolrSearchCore.class).get(0); + indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance(); + configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + } + + @Override + public void internalRun() throws Exception { + logInfoAndOut("Starting Item resync of Solr and Database..."); + + timeUntilReindex = getTimeUntilReindex(); + maxTime = getMaxTime(); + + Context context = new Context(); + + try { + context.turnOffAuthorisationSystem(); + performStatusUpdate(context); + } finally { + context.restoreAuthSystemState(); + context.complete(); + } + } + + private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB); + solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE); + String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]"; + logDebugAndOut("Date range filter used; " + dateRangeFilter); + solrQuery.addFilterQuery(dateRangeFilter); + 
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD); + solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID); + QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD); + + if (response != null) { + logInfoAndOut(response.getResults().size() + " items found to process"); + + for (SolrDocument doc : response.getResults()) { + String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD); + String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID); + logDebugAndOut("Processing item with UUID: " + uuid); + + Optional indexableObject = Optional.empty(); + try { + indexableObject = indexObjectServiceFactory + .getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid); + } catch (SQLException e) { + log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid + + "\" from the database, removing related solr document", e); + } + + try { + if (indexableObject.isPresent()) { + logDebugAndOut("Item exists in DB, updating solr document"); + updateItem(context, indexableObject.get()); + } else { + logDebugAndOut("Item doesn't exist in DB, removing solr document"); + removeItem(context, uniqueId); + } + } catch (SQLException | IOException e) { + log.error(e.getMessage(), e); + } + } + } + + indexingService.commit(); + } + + private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException { + Map fieldModifier = new HashMap<>(1); + fieldModifier.put("remove", STATUS_FIELD_PREDB); + indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier); + } + + private void removeItem(Context context, String uniqueId) throws IOException, SQLException { + indexingService.unIndexContent(context, uniqueId); + } + + private String getMaxTime() { + Calendar cal = Calendar.getInstance(); + if (timeUntilReindex > 0) { + cal.add(Calendar.MILLISECOND, -timeUntilReindex); + } + return 
SolrUtils.getDateFormatter().format(cal.getTime()); + } + + private int getTimeUntilReindex() { + return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0); + } + + private void logInfoAndOut(String message) { + log.info(message); + System.out.println(message); + } + + private void logDebugAndOut(String message) { + log.debug(message); + System.out.println(message); + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java new file mode 100644 index 000000000000..067c76cce8b3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.solrdatabaseresync; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * The {@link ScriptConfiguration} for the {@link SolrDatabaseResyncCli} script. 
+ */ +public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration { + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + options = new Options(); + } + return options; + } +} diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/CreateStatReport.java b/dspace-api/src/main/java/org/dspace/app/statistics/CreateStatReport.java index 5785d1ee97a7..a7d5c4a66a2d 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/CreateStatReport.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/CreateStatReport.java @@ -64,10 +64,6 @@ public class CreateStatReport { */ private static Context context; - /** - * the config file from which to configure the analyser - */ - /** * Default constructor */ @@ -170,22 +166,19 @@ private static void statMonthly() throws Exception { String myLogDir = null; String myFileTemplate = null; String myConfigFile = null; - StringBuffer myOutFile = null; - Date myStartDate = null; - Date myEndDate = null; boolean myLookUp = false; Calendar start = new GregorianCalendar(calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.getActualMinimum(Calendar.DAY_OF_MONTH)); - myStartDate = start.getTime(); + Date myStartDate = start.getTime(); Calendar end = new GregorianCalendar(calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH), calendar.getActualMaximum(Calendar.DAY_OF_MONTH)); - myEndDate = end.getTime(); + Date myEndDate = end.getTime(); - myOutFile = new StringBuffer(outputLogDirectory); + StringBuilder myOutFile = new StringBuilder(outputLogDirectory); myOutFile.append(outputPrefix); myOutFile.append(calendar.get(Calendar.YEAR)); myOutFile.append("-"); @@ -211,12 +204,11 @@ private static void statGeneral() 
throws Exception { String myLogDir = null; String myFileTemplate = null; String myConfigFile = null; - StringBuffer myOutFile = null; Date myStartDate = null; Date myEndDate = null; boolean myLookUp = false; - myOutFile = new StringBuffer(outputLogDirectory); + StringBuilder myOutFile = new StringBuilder(outputLogDirectory); myOutFile.append(outputPrefix); myOutFile.append(calendar.get(Calendar.YEAR)); myOutFile.append("-"); @@ -245,9 +237,6 @@ private static void statInitial() throws Exception { String myLogDir = null; String myFileTemplate = null; String myConfigFile = null; - StringBuffer myOutFile = null; - Date myStartDate = null; - Date myEndDate = null; boolean myLookUp = false; Calendar reportEndDate = new GregorianCalendar(calendar.get(Calendar.YEAR), @@ -260,14 +249,14 @@ private static void statInitial() throws Exception { Calendar start = new GregorianCalendar(currentMonth.get(Calendar.YEAR), currentMonth.get(Calendar.MONTH), currentMonth.getActualMinimum(Calendar.DAY_OF_MONTH)); - myStartDate = start.getTime(); + Date myStartDate = start.getTime(); Calendar end = new GregorianCalendar(currentMonth.get(Calendar.YEAR), currentMonth.get(Calendar.MONTH), currentMonth.getActualMaximum(Calendar.DAY_OF_MONTH)); - myEndDate = end.getTime(); + Date myEndDate = end.getTime(); - myOutFile = new StringBuffer(outputLogDirectory); + StringBuilder myOutFile = new StringBuilder(outputLogDirectory); myOutFile.append(outputPrefix); myOutFile.append(currentMonth.get(Calendar.YEAR)); myOutFile.append("-"); @@ -293,11 +282,9 @@ private static void statReportGeneral() throws Exception { String outputPrefix = "report-general-"; String myFormat = "html"; - StringBuffer myInput = null; - StringBuffer myOutput = null; String myMap = null; - myInput = new StringBuffer(outputLogDirectory); + StringBuilder myInput = new StringBuilder(outputLogDirectory); myInput.append(inputPrefix); myInput.append(calendar.get(Calendar.YEAR)); myInput.append("-"); @@ -306,7 +293,7 @@ private 
static void statReportGeneral() throws Exception { myInput.append(calendar.get(Calendar.DAY_OF_MONTH)); myInput.append(outputSuffix); - myOutput = new StringBuffer(outputReportDirectory); + StringBuilder myOutput = new StringBuilder(outputReportDirectory); myOutput.append(outputPrefix); myOutput.append(calendar.get(Calendar.YEAR)); myOutput.append("-"); @@ -332,8 +319,6 @@ private static void statReportInitial() throws Exception { String outputPrefix = "report-"; String myFormat = "html"; - StringBuffer myInput = null; - StringBuffer myOutput = null; String myMap = null; Calendar reportEndDate = new GregorianCalendar(calendar.get(Calendar.YEAR), @@ -344,14 +329,14 @@ private static void statReportInitial() throws Exception { while (currentMonth.before(reportEndDate)) { - myInput = new StringBuffer(outputLogDirectory); + StringBuilder myInput = new StringBuilder(outputLogDirectory); myInput.append(inputPrefix); myInput.append(currentMonth.get(Calendar.YEAR)); myInput.append("-"); myInput.append(currentMonth.get(Calendar.MONTH) + 1); myInput.append(outputSuffix); - myOutput = new StringBuffer(outputReportDirectory); + StringBuilder myOutput = new StringBuilder(outputReportDirectory); myOutput.append(outputPrefix); myOutput.append(currentMonth.get(Calendar.YEAR)); myOutput.append("-"); @@ -376,18 +361,16 @@ private static void statReportMonthly() throws Exception { String outputPrefix = "report-"; String myFormat = "html"; - StringBuffer myInput = null; - StringBuffer myOutput = null; String myMap = null; - myInput = new StringBuffer(outputLogDirectory); + StringBuilder myInput = new StringBuilder(outputLogDirectory); myInput.append(inputPrefix); myInput.append(calendar.get(Calendar.YEAR)); myInput.append("-"); myInput.append(calendar.get(Calendar.MONTH) + 1); myInput.append(outputSuffix); - myOutput = new StringBuffer(outputReportDirectory); + StringBuilder myOutput = new StringBuilder(outputReportDirectory); myOutput.append(outputPrefix); 
myOutput.append(calendar.get(Calendar.YEAR)); myOutput.append("-"); diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java b/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java index 0ece47d32d1f..2e4ed69b268e 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/LogAnalyser.java @@ -29,9 +29,13 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.Utils; import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.SearchServiceException; @@ -44,6 +48,7 @@ * files. Most input can be configured; use the -help flag for a full list * of usage information. * + *

* The output of this file is plain text and forms an "aggregation" file which * can then be used for display purposes using the related ReportGenerator * class. @@ -167,7 +172,7 @@ public class LogAnalyser { /** * the average number of views per item */ - private static int views = 0; + private static long views = 0; /////////////////////// // regular expressions @@ -236,12 +241,12 @@ public class LogAnalyser { /** * pattern to match commented out lines from the config file */ - private static final Pattern comment = Pattern.compile("^#"); + private static final Pattern COMMENT = Pattern.compile("^#"); /** * pattern to match genuine lines from the config file */ - private static final Pattern real = Pattern.compile("^(.+)=(.+)"); + private static final Pattern REAL = Pattern.compile("^(.+)=(.+)"); /** * pattern to match all search types @@ -337,44 +342,73 @@ public static void main(String[] argv) Date myEndDate = null; boolean myLookUp = false; - // read in our command line options - for (int i = 0; i < argv.length; i++) { - if (argv[i].equals("-log")) { - myLogDir = argv[i + 1]; - } + // Define command line options. 
+ Options options = new Options(); + Option option; - if (argv[i].equals("-file")) { - myFileTemplate = argv[i + 1]; - } + option = Option.builder().longOpt("log").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-cfg")) { - myConfigFile = argv[i + 1]; - } + option = Option.builder().longOpt("file").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-out")) { - myOutFile = argv[i + 1]; - } + option = Option.builder().longOpt("cfg").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-help")) { - LogAnalyser.usage(); - System.exit(0); - } + option = Option.builder().longOpt("out").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-start")) { - myStartDate = parseDate(argv[i + 1]); - } + option = Option.builder().longOpt("help").build(); + options.addOption(option); - if (argv[i].equals("-end")) { - myEndDate = parseDate(argv[i + 1]); - } + option = Option.builder().longOpt("start").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-lookup")) { - myLookUp = true; - } + option = Option.builder().longOpt("end").hasArg().build(); + options.addOption(option); + + option = Option.builder().longOpt("lookup").build(); + options.addOption(option); + + // Parse the command. + DefaultParser cmdParser = new DefaultParser(); + CommandLine cmd = cmdParser.parse(options, argv); + + // Analyze the command. 
+ if (cmd.hasOption("help")) { + LogAnalyser.usage(); + System.exit(0); + } + + if (cmd.hasOption("log")) { + myLogDir = cmd.getOptionValue("log"); + } + + if (cmd.hasOption("file")) { + myFileTemplate = cmd.getOptionValue("file"); + } + + if (cmd.hasOption("cfg")) { + myConfigFile = cmd.getOptionValue("cfg"); } + if (cmd.hasOption("out")) { + myOutFile = cmd.getOptionValue("out"); + } + + if (cmd.hasOption("start")) { + myStartDate = parseDate(cmd.getOptionValue("start")); + } + + if (cmd.hasOption("end")) { + myEndDate = parseDate(cmd.getOptionValue("end")); + } + + myLookUp = cmd.hasOption("lookup"); + // now call the method which actually processes the logs - processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp); + processLogs(context, myLogDir, myFileTemplate, myConfigFile, myOutFile, + myStartDate, myEndDate, myLookUp); } /** @@ -406,18 +440,18 @@ public static String processLogs(Context context, String myLogDir, startTime = new GregorianCalendar(); //instantiate aggregators - actionAggregator = new HashMap(); - searchAggregator = new HashMap(); - userAggregator = new HashMap(); - itemAggregator = new HashMap(); - archiveStats = new HashMap(); + actionAggregator = new HashMap<>(); + searchAggregator = new HashMap<>(); + userAggregator = new HashMap<>(); + itemAggregator = new HashMap<>(); + archiveStats = new HashMap<>(); //instantiate lists - generalSummary = new ArrayList(); - excludeWords = new ArrayList(); - excludeTypes = new ArrayList(); - excludeChars = new ArrayList(); - itemTypes = new ArrayList(); + generalSummary = new ArrayList<>(); + excludeWords = new ArrayList<>(); + excludeTypes = new ArrayList<>(); + excludeChars = new ArrayList<>(); + itemTypes = new ArrayList<>(); // set the parameters for this analysis setParameters(myLogDir, myFileTemplate, myConfigFile, myOutFile, myStartDate, myEndDate, myLookUp); @@ -529,10 +563,11 @@ public static String processLogs(Context context, String myLogDir, 
// for each search word add to the aggregator or // increment the aggregator's counter - for (int j = 0; j < words.length; j++) { + for (String word : words) { // FIXME: perhaps aggregators ought to be objects // themselves - searchAggregator.put(words[j], increment(searchAggregator, words[j])); + searchAggregator.put(word, + increment(searchAggregator, word)); } } @@ -591,13 +626,13 @@ public static String processLogs(Context context, String myLogDir, } // do the average views analysis - if ((archiveStats.get("All Items")).intValue() != 0) { + if ((archiveStats.get("All Items")) != 0) { // FIXME: this is dependent on their being a query on the db, which // there might not always be if it becomes configurable - Double avg = Math.ceil( + double avg = Math.ceil( (actionAggregator.get("view_item")).doubleValue() / (archiveStats.get("All Items")).doubleValue()); - views = avg.intValue(); + views = Math.round(avg); } // finally, write the output @@ -672,55 +707,55 @@ public static String createOutput() { Iterator keys = null; // output the number of lines parsed - summary.append("log_lines=" + Integer.toString(lineCount) + "\n"); + summary.append("log_lines=").append(Integer.toString(lineCount)).append("\n"); // output the number of warnings encountered - summary.append("warnings=" + Integer.toString(warnCount) + "\n"); - summary.append("exceptions=" + Integer.toString(excCount) + "\n"); + summary.append("warnings=").append(Integer.toString(warnCount)).append("\n"); + summary.append("exceptions=").append(Integer.toString(excCount)).append("\n"); // set the general summary config up in the aggregator file for (int i = 0; i < generalSummary.size(); i++) { - summary.append("general_summary=" + generalSummary.get(i) + "\n"); + summary.append("general_summary=").append(generalSummary.get(i)).append("\n"); } // output the host name - summary.append("server_name=" + hostName + "\n"); + summary.append("server_name=").append(hostName).append("\n"); // output the service name - 
summary.append("service_name=" + name + "\n"); + summary.append("service_name=").append(name).append("\n"); // output the date information if necessary SimpleDateFormat sdf = new SimpleDateFormat("dd'/'MM'/'yyyy"); if (startDate != null) { - summary.append("start_date=" + sdf.format(startDate) + "\n"); + summary.append("start_date=").append(sdf.format(startDate)).append("\n"); } else if (logStartDate != null) { - summary.append("start_date=" + sdf.format(logStartDate) + "\n"); + summary.append("start_date=").append(sdf.format(logStartDate)).append("\n"); } if (endDate != null) { - summary.append("end_date=" + sdf.format(endDate) + "\n"); + summary.append("end_date=").append(sdf.format(endDate)).append("\n"); } else if (logEndDate != null) { - summary.append("end_date=" + sdf.format(logEndDate) + "\n"); + summary.append("end_date=").append(sdf.format(logEndDate)).append("\n"); } // write out the archive stats keys = archiveStats.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - summary.append("archive." + key + "=" + archiveStats.get(key) + "\n"); + summary.append("archive.").append(key).append("=").append(archiveStats.get(key)).append("\n"); } // write out the action aggregation results keys = actionAggregator.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - summary.append("action." 
+ key + "=" + actionAggregator.get(key) + "\n"); + summary.append("action.").append(key).append("=").append(actionAggregator.get(key)).append("\n"); } // depending on the config settings for reporting on emails output the // login information - summary.append("user_email=" + userEmail + "\n"); + summary.append("user_email=").append(userEmail).append("\n"); int address = 1; keys = userAggregator.keySet().iterator(); @@ -731,9 +766,10 @@ public static String createOutput() { String key = keys.next(); summary.append("user."); if (userEmail.equals("on")) { - summary.append(key + "=" + userAggregator.get(key) + "\n"); + summary.append(key).append("=").append(userAggregator.get(key)).append("\n"); } else if (userEmail.equals("alias")) { - summary.append("Address " + Integer.toString(address++) + "=" + userAggregator.get(key) + "\n"); + summary.append("Address ").append(Integer.toString(address++)) + .append("=").append(userAggregator.get(key)).append("\n"); } } @@ -742,12 +778,13 @@ public static String createOutput() { // the listing there are // output the search word information - summary.append("search_floor=" + searchFloor + "\n"); + summary.append("search_floor=").append(searchFloor).append("\n"); keys = searchAggregator.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - if ((searchAggregator.get(key)).intValue() >= searchFloor) { - summary.append("search." + key + "=" + searchAggregator.get(key) + "\n"); + if ((searchAggregator.get(key)) >= searchFloor) { + summary.append("search.").append(key).append("=") + .append(searchAggregator.get(key)).append("\n"); } } @@ -759,35 +796,35 @@ public static String createOutput() { // be the same thing. 
// item viewing information - summary.append("item_floor=" + itemFloor + "\n"); - summary.append("host_url=" + url + "\n"); - summary.append("item_lookup=" + itemLookup + "\n"); + summary.append("item_floor=").append(itemFloor).append("\n"); + summary.append("host_url=").append(url).append("\n"); + summary.append("item_lookup=").append(itemLookup).append("\n"); // write out the item access information keys = itemAggregator.keySet().iterator(); while (keys.hasNext()) { String key = keys.next(); - if ((itemAggregator.get(key)).intValue() >= itemFloor) { - summary.append("item." + key + "=" + itemAggregator.get(key) + "\n"); + if ((itemAggregator.get(key)) >= itemFloor) { + summary.append("item.").append(key).append("=") + .append(itemAggregator.get(key)).append("\n"); } } // output the average views per item if (views > 0) { - summary.append("avg_item_views=" + views + "\n"); + summary.append("avg_item_views=").append(views).append("\n"); } // insert the analysis processing time information Calendar endTime = new GregorianCalendar(); long timeInMillis = (endTime.getTimeInMillis() - startTime.getTimeInMillis()); - summary.append("analysis_process_time=" + Long.toString(timeInMillis / 1000) + "\n"); + summary.append("analysis_process_time=") + .append(Long.toString(timeInMillis / 1000)).append("\n"); // finally write the string into the output file - try { - BufferedWriter out = new BufferedWriter(new FileWriter(outFile)); + try (BufferedWriter out = new BufferedWriter(new FileWriter(outFile));) { out.write(summary.toString()); out.flush(); - out.close(); } catch (IOException e) { System.out.println("Unable to write to output file " + outFile); System.exit(0); @@ -891,11 +928,11 @@ public static void setRegex(String fileTemplate) { if (i > 0) { wordRXString.append("|"); } - wordRXString.append(" " + excludeWords.get(i) + " "); + wordRXString.append(" ").append(excludeWords.get(i)).append(" "); wordRXString.append("|"); - wordRXString.append("^" + excludeWords.get(i) + 
" "); + wordRXString.append("^").append(excludeWords.get(i)).append(" "); wordRXString.append("|"); - wordRXString.append(" " + excludeWords.get(i) + "$"); + wordRXString.append(" ").append(excludeWords.get(i)).append("$"); } wordRXString.append(")"); wordRX = Pattern.compile(wordRXString.toString()); @@ -956,8 +993,8 @@ public static void readConfig(String configFile) throws IOException { // read in the config file and set up our instance variables while ((record = br.readLine()) != null) { // check to see what kind of line we have - Matcher matchComment = comment.matcher(record); - Matcher matchReal = real.matcher(record); + Matcher matchComment = COMMENT.matcher(record); + Matcher matchReal = REAL.matcher(record); // if the line is not a comment and is real, read it in if (!matchComment.matches() && matchReal.matches()) { @@ -968,7 +1005,7 @@ public static void readConfig(String configFile) throws IOException { // read the config values into our instance variables (see // documentation for more info on config params) if (key.equals("general.summary")) { - actionAggregator.put(value, Integer.valueOf(0)); + actionAggregator.put(value, 0); generalSummary.add(value); } @@ -1022,9 +1059,9 @@ public static Integer increment(Map map, String key) { Integer newValue = null; if (map.containsKey(key)) { // FIXME: this seems like a ridiculous way to add Integers - newValue = Integer.valueOf((map.get(key)).intValue() + 1); + newValue = (map.get(key)) + 1; } else { - newValue = Integer.valueOf(1); + newValue = 1; } return newValue; } @@ -1144,17 +1181,17 @@ public static LogLine getLogLine(String line) { if (match.matches()) { // set up a new log line object LogLine logLine = new LogLine(parseDate(match.group(1).trim()), - LogManager.unescapeLogField(match.group(2)).trim(), - LogManager.unescapeLogField(match.group(3)).trim(), - LogManager.unescapeLogField(match.group(4)).trim(), - LogManager.unescapeLogField(match.group(5)).trim()); + 
LogHelper.unescapeLogField(match.group(2)).trim(), + LogHelper.unescapeLogField(match.group(3)).trim(), + LogHelper.unescapeLogField(match.group(4)).trim(), + LogHelper.unescapeLogField(match.group(5)).trim()); return logLine; } else { match = validBase.matcher(line); if (match.matches()) { LogLine logLine = new LogLine(parseDate(match.group(1).trim()), - LogManager.unescapeLogField(match.group(2)).trim(), + LogHelper.unescapeLogField(match.group(2)).trim(), null, null, null diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java b/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java index 25c6d8cb9cf8..c5fe0072f514 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java +++ b/dspace-api/src/main/java/org/dspace/app/statistics/ReportGenerator.java @@ -27,6 +27,10 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataValue; @@ -162,7 +166,7 @@ public class ReportGenerator { /** * pattern that matches an unqualified aggregator property */ - private static final Pattern real = Pattern.compile("^(.+)=(.+)"); + private static final Pattern REAL = Pattern.compile("^(.+)=(.+)"); ////////////////////////// // Miscellaneous variables @@ -221,28 +225,46 @@ public static void main(String[] argv) String myOutput = null; String myMap = null; - // read in our command line options - for (int i = 0; i < argv.length; i++) { - if (argv[i].equals("-format")) { - myFormat = argv[i + 1].toLowerCase(); - } + Options options = new Options(); + Option option; - if (argv[i].equals("-in")) { - myInput = argv[i + 1]; - } + option = Option.builder().longOpt("format").hasArg().build(); + options.addOption(option); - if 
(argv[i].equals("-out")) { - myOutput = argv[i + 1]; - } + option = Option.builder().longOpt("in").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-map")) { - myMap = argv[i + 1]; - } + option = Option.builder().longOpt("out").hasArg().build(); + options.addOption(option); - if (argv[i].equals("-help")) { - usage(); - System.exit(0); - } + option = Option.builder().longOpt("map").hasArg().build(); + options.addOption(option); + + option = Option.builder().longOpt("help").build(); + options.addOption(option); + + DefaultParser parser = new DefaultParser(); + CommandLine cmd = parser.parse(options, argv); + + if (cmd.hasOption("help")) { + usage(); + System.exit(0); + } + + if (cmd.hasOption("format")) { + myFormat = cmd.getOptionValue("format"); + } + + if (cmd.hasOption("in")) { + myInput = cmd.getOptionValue("in"); + } + + if (cmd.hasOption("out")) { + myOutput = cmd.getOptionValue("out"); + } + + if (cmd.hasOption("map")) { + myMap = cmd.getOptionValue("map"); } processReport(context, myFormat, myInput, myOutput, myMap); @@ -576,7 +598,7 @@ public static void readMap(String map) // loop through the map file and read in the values while ((record = br.readLine()) != null) { - Matcher matchReal = real.matcher(record); + Matcher matchReal = REAL.matcher(record); // if the line is real then read it in if (matchReal.matches()) { @@ -650,7 +672,7 @@ public static void readInput(String input) // loop through the aggregator file and read in the values while ((record = br.readLine()) != null) { // match real lines - Matcher matchReal = real.matcher(record); + Matcher matchReal = REAL.matcher(record); // pre-prepare our input strings String section = null; diff --git a/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java b/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java index fd72b3b805c2..cc8a7024f1b2 100644 --- a/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java +++ 
b/dspace-api/src/main/java/org/dspace/app/statistics/StatisticsLoader.java @@ -324,11 +324,7 @@ private static File[] getAnalysisAndReportFileList() { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); File reportDir = new File(configurationService.getProperty("log.report.dir")); - if (reportDir != null) { - return reportDir.listFiles(new AnalysisAndReportFilter()); - } - - return null; + return reportDir.listFiles(new AnalysisAndReportFilter()); } /** diff --git a/dspace-api/src/main/java/org/dspace/app/util/CacheSnooper.java b/dspace-api/src/main/java/org/dspace/app/util/CacheSnooper.java deleted file mode 100644 index 22ad518ea3b7..000000000000 --- a/dspace-api/src/main/java/org/dspace/app/util/CacheSnooper.java +++ /dev/null @@ -1,58 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.app.util; - -import net.sf.ehcache.Cache; -import net.sf.ehcache.CacheManager; -import org.dspace.core.Context; -import org.dspace.servicemanager.DSpaceKernelImpl; -import org.dspace.servicemanager.DSpaceKernelInit; -import org.dspace.services.CachingService; - -/** - * List all EhCache CacheManager and Cache instances. - * - *

This is a debugging tool, not used in the daily operation of DSpace. - * Just run it from the installed instance using - * {@code bin/dspace dsrun org.dspace.app.util.CacheSnooper} - * to check that the cache configuration is what you expect it to be, - * given your configuration. - * - *

This was created to prove a specific cache configuration patch, - * but I leave it here in the hope that it may be useful to others. - * - * @author Mark H. Wood - */ -public class CacheSnooper { - private CacheSnooper() { } - - public static void main(String[] argv) { - // Ensure that the DSpace kernel is started. - DSpaceKernelImpl kernel = DSpaceKernelInit.getKernel(null); - - // Ensure that the services cache manager is started. - CachingService serviceCaches = kernel.getServiceManager() - .getServiceByName(null, CachingService.class); - - // Ensure that the database layer is started. - Context ctx = new Context(); - - // List those caches! - for (CacheManager manager : CacheManager.ALL_CACHE_MANAGERS) { - System.out.format("CacheManager: %s%n", manager); - for (String cacheName : manager.getCacheNames()) { - Cache cache = manager.getCache(cacheName); - System.out.format(" Cache: '%s'; maxHeap: %d; maxDisk: %d%n", - cacheName, - cache.getCacheConfiguration().getMaxEntriesLocalHeap(), - cache.getCacheConfiguration().getMaxEntriesLocalDisk()); - } - } - } -} diff --git a/dspace-api/src/main/java/org/dspace/app/util/Configuration.java b/dspace-api/src/main/java/org/dspace/app/util/Configuration.java index e9b125c41cde..e4a59eeb4da4 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/Configuration.java +++ b/dspace-api/src/main/java/org/dspace/app/util/Configuration.java @@ -37,6 +37,7 @@ private Configuration() { } *

  • {@code --property name} prints the value of the DSpace configuration * property {@code name} to the standard output.
  • *
  • {@code --raw} suppresses parameter substitution in the output.
  • + *
  • {@code --first} print only the first of multiple values.
  • *
  • {@code --help} describes these options.
  • * * If the property does not exist, nothing is written. @@ -51,6 +52,8 @@ public static void main(String[] argv) { "optional name of the module in which 'property' exists"); options.addOption("r", "raw", false, "do not do property substitution on the value"); + options.addOption("f", "first", false, + "display only the first value of an array property"); options.addOption("?", "Get help"); options.addOption("h", "help", false, "Get help"); @@ -90,19 +93,36 @@ public static void main(String[] argv) { propNameBuilder.append(cmd.getOptionValue('p')); String propName = propNameBuilder.toString(); - // Print the property's value, if it exists + // Print the property's value(s), if it exists ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService(); if (!cfg.hasProperty(propName)) { System.out.println(); } else { - String val; if (cmd.hasOption('r')) { - val = cfg.getPropertyValue(propName).toString(); + // Print "raw" values (without property substitutions) + Object rawValue = cfg.getPropertyValue(propName); + if (rawValue.getClass().isArray()) { + for (Object value : (Object[]) rawValue) { + System.out.println(value.toString()); + if (cmd.hasOption('f')) { + break; // If --first print only one value + } + } + } else { // Not an array + System.out.println(rawValue.toString()); + } } else { - val = cfg.getProperty(propName); + // Print values with property substitutions + String[] values = cfg.getArrayProperty(propName); + for (String value : values) { + System.out.println(value); + if (cmd.hasOption('f')) { + break; // If --first print only one value + } + } } - System.out.println(val); } + System.exit(0); } } diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index 32fd5d634dab..11f9aadd869b 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -10,6 +10,7 @@ import 
java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import javax.annotation.Nullable; @@ -131,10 +132,15 @@ public class DCInput { private boolean closedVocabulary = false; /** - * the regex to comply with, null if nothing + * the regex in ECMAScript standard format, usable also by rests. */ private String regex = null; + /** + * the computed pattern, null if nothing + */ + private Pattern pattern = null; + /** * allowed document types */ @@ -144,8 +150,8 @@ public class DCInput { private boolean isMetadataField = false; private String relationshipType = null; private String searchConfiguration = null; - private String filter; - private List externalSources; + private final String filter; + private final List externalSources; /** * The scope of the input sets, this restricts hidden metadata fields from @@ -178,7 +184,7 @@ public DCInput(Map fieldMap, Map> listMap) //check if the input have a language tag language = Boolean.valueOf(fieldMap.get("language")); - valueLanguageList = new ArrayList(); + valueLanguageList = new ArrayList<>(); if (language) { String languageNameTmp = fieldMap.get("value-pairs-name"); if (StringUtils.isBlank(languageNameTmp)) { @@ -191,7 +197,7 @@ public DCInput(Map fieldMap, Map> listMap) repeatable = "true".equalsIgnoreCase(repStr) || "yes".equalsIgnoreCase(repStr); String nameVariantsString = fieldMap.get("name-variants"); - nameVariants = (StringUtils.isNotBlank(nameVariantsString)) ? + nameVariants = StringUtils.isNotBlank(nameVariantsString) ? 
nameVariantsString.equalsIgnoreCase("true") : false; label = fieldMap.get("label"); inputType = fieldMap.get("input-type"); @@ -203,11 +209,11 @@ public DCInput(Map fieldMap, Map> listMap) } hint = fieldMap.get("hint"); warning = fieldMap.get("required"); - required = (warning != null && warning.length() > 0); + required = warning != null && warning.length() > 0; visibility = fieldMap.get("visibility"); readOnly = fieldMap.get("readonly"); vocabulary = fieldMap.get("vocabulary"); - regex = fieldMap.get("regex"); + this.initRegex(fieldMap.get("regex")); String closedVocabularyStr = fieldMap.get("closedVocabulary"); closedVocabulary = "true".equalsIgnoreCase(closedVocabularyStr) || "yes".equalsIgnoreCase(closedVocabularyStr); @@ -238,6 +244,22 @@ public DCInput(Map fieldMap, Map> listMap) } + protected void initRegex(String regex) { + this.regex = null; + this.pattern = null; + if (regex != null) { + try { + Optional.ofNullable(RegexPatternUtils.computePattern(regex)) + .ifPresent(pattern -> { + this.pattern = pattern; + this.regex = regex; + }); + } catch (PatternSyntaxException e) { + log.warn("The regex field of input {} with value {} is invalid!", this.label, regex); + } + } + } + /** * Is this DCInput for display in the given scope? The scope should be * either "workflow" or "submit", as per the input forms definition. 
If the @@ -248,7 +270,7 @@ public DCInput(Map fieldMap, Map> listMap) * @return whether the input should be displayed or not */ public boolean isVisible(String scope) { - return (visibility == null || visibility.equals(scope)); + return visibility == null || visibility.equals(scope); } /** @@ -381,7 +403,7 @@ public String getLabel() { /** * Get the style for this form field - * + * * @return the style */ public String getStyle() { @@ -512,8 +534,12 @@ public String getScope() { return visibility; } + public Pattern getPattern() { + return this.pattern; + } + public String getRegex() { - return regex; + return this.regex; } public String getFieldName() { @@ -546,34 +572,45 @@ public boolean isQualdropValue() { public boolean validate(String value) { if (StringUtils.isNotBlank(value)) { try { - if (StringUtils.isNotBlank(regex)) { - Pattern pattern = Pattern.compile(regex); + if (this.pattern != null) { if (!pattern.matcher(value).matches()) { return false; } } } catch (PatternSyntaxException ex) { - log.error("Regex validation failed!", ex.getMessage()); + log.error("Regex validation failed! {}", ex.getMessage()); } } - return true; } /** - * Verify whether the current field contains an entity relationship - * This also implies a relationship type is defined for this field - * The field can contain both an entity relationship and a metadata field simultaneously + * Get the type bind list for use in determining whether + * to display this field in angular dynamic form building + * @return list of bound types + */ + public List getTypeBindList() { + return typeBind; + } + + /** + * Verify whether the current field contains an entity relationship. + * This also implies a relationship type is defined for this field. + * The field can contain both an entity relationship and a metadata field + * simultaneously. + * @return true if the field contains a relationship. 
*/ public boolean isRelationshipField() { return isRelationshipField; } /** - * Verify whether the current field contains a metadata field - * This also implies a field type is defined for this field - * The field can contain both an entity relationship and a metadata field simultaneously + * Verify whether the current field contains a metadata field. + * This also implies a field type is defined for this field. + * The field can contain both an entity relationship and a metadata field + * simultaneously. + * @return true if the field contains a metadata field. */ public boolean isMetadataField() { return isMetadataField; diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index bfd4270cf27f..c98339225600 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -7,8 +7,10 @@ */ package org.dspace.app.util; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Utils; @@ -16,7 +18,6 @@ * Class representing all DC inputs required for a submission, organized into pages * * @author Brian S. Hughes, based on work by Jenny Toves, OCLC - * @version $Revision$ */ public class DCInputSet { @@ -33,7 +34,6 @@ public class DCInputSet { * constructor * * @param formName form name - * @param mandatoryFlags * @param rows the rows * @param listMap map */ @@ -119,9 +119,12 @@ public boolean isFieldPresent(String fieldName) { return true; } } + } else if (field.isRelationshipField() && + ("relation." 
+ field.getRelationshipType()).equals(fieldName)) { + return true; } else { String fullName = field.getFieldName(); - if (fullName.equals(fieldName)) { + if (Objects.equals(fullName, fieldName)) { return true; } } @@ -176,4 +179,50 @@ protected boolean doField(DCInput dcf, boolean addTitleAlternative, return true; } + /** + * Iterate DC input rows and populate a list of all allowed field names in this submission configuration. + * This is important because an input can be configured repeatedly in a form (for example it could be required + * for type Book, and allowed but not required for type Article). + * If the field is allowed for this document type it'll never be stripped from metadata on validation. + * + * This can be more efficient than isFieldPresent to avoid looping the input set with each check. + * + * @param documentTypeValue Document type eg. Article, Book + * @return ArrayList of field names to use in validation + */ + public List populateAllowedFieldNames(String documentTypeValue) { + List allowedFieldNames = new ArrayList<>(); + // Before iterating each input for validation, run through all inputs + fields and populate a lookup + // map with inputs for this type. Because an input can be configured repeatedly in a form (for example + // it could be required for type Book, and allowed but not required for type Article), allowed=true will + // always take precedence + for (DCInput[] row : inputs) { + for (DCInput input : row) { + if (input.isQualdropValue()) { + List inputPairs = input.getPairs(); + //starting from the second element of the list and skipping one every time because the display + // values are also in the list and before the stored values. + for (int i = 1; i < inputPairs.size(); i += 2) { + String fullFieldname = input.getFieldName() + "." 
+ inputPairs.get(i); + if (input.isAllowedFor(documentTypeValue)) { + if (!allowedFieldNames.contains(fullFieldname)) { + allowedFieldNames.add(fullFieldname); + } + // For the purposes of qualdrop, we have to add the field name without the qualifier + // too, or a required qualdrop will get confused and incorrectly reject a value + if (!allowedFieldNames.contains(input.getFieldName())) { + allowedFieldNames.add(input.getFieldName()); + } + } + } + } else { + if (input.isAllowedFor(documentTypeValue) && !allowedFieldNames.contains(input.getFieldName())) { + allowedFieldNames.add(input.getFieldName()); + } + } + } + } + return allowedFieldNames; + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java index 6343ef4fe15b..38692c73a6ce 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputsReader.java @@ -24,6 +24,7 @@ import org.dspace.content.MetadataSchemaEnum; import org.dspace.core.Utils; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.factory.SubmissionServiceFactory; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; @@ -158,7 +159,8 @@ public List getInputsByCollectionHandle(String collectionHandle) throws DCInputsReaderException { SubmissionConfig config; try { - config = new SubmissionConfigReader().getSubmissionConfigByCollection(collectionHandle); + config = SubmissionServiceFactory.getInstance().getSubmissionConfigService() + .getSubmissionConfigByCollection(collectionHandle); String formName = config.getSubmissionName(); if (formName == null) { throw new DCInputsReaderException("No form designated as default"); @@ -180,7 +182,8 @@ public List getInputsBySubmissionName(String name) throws DCInputsReaderException { SubmissionConfig config; try { - config = new 
SubmissionConfigReader().getSubmissionConfigByName(name); + config = SubmissionServiceFactory.getInstance().getSubmissionConfigService() + .getSubmissionConfigByName(name); String formName = config.getSubmissionName(); if (formName == null) { throw new DCInputsReaderException("No form designated as default"); diff --git a/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java b/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java index add98af96f96..ae6ba7e83f55 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java +++ b/dspace-api/src/main/java/org/dspace/app/util/GoogleBitstreamComparator.java @@ -86,8 +86,10 @@ public int compare(Bitstream b1, Bitstream b2) { if (priority1 > priority2) { return 1; } else if (priority1 == priority2) { - if (b1.getSizeBytes() <= b2.getSizeBytes()) { + if (b1.getSizeBytes() < b2.getSizeBytes()) { return 1; + } else if (b1.getSizeBytes() == b2.getSizeBytes()) { + return 0; } else { return -1; } diff --git a/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java b/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java index 0021f267005f..c4f3f2235e35 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java +++ b/dspace-api/src/main/java/org/dspace/app/util/GoogleMetadata.java @@ -42,7 +42,7 @@ import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; +import org.jdom2.Element; /** * Configuration and mapping for Google Scholar output metadata @@ -470,11 +470,7 @@ protected ArrayList> parseOptions(String configFilter) { parsedOptions.add(parsedFields); } - if (null != parsedOptions) { - return parsedOptions; - } else { - return null; - } + return parsedOptions; } /** diff --git a/dspace-api/src/main/java/org/dspace/app/util/InitializeEntities.java 
b/dspace-api/src/main/java/org/dspace/app/util/InitializeEntities.java index 5b2413642ccc..0a072a9819eb 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/InitializeEntities.java +++ b/dspace-api/src/main/java/org/dspace/app/util/InitializeEntities.java @@ -10,8 +10,6 @@ import java.io.File; import java.io.IOException; import java.sql.SQLException; -import java.util.LinkedList; -import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; @@ -30,7 +28,6 @@ import org.dspace.content.RelationshipType; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.EntityTypeService; -import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.Context; import org.w3c.dom.Document; @@ -40,22 +37,20 @@ import org.xml.sax.SAXException; /** - * This script is used to initialize the database with a set of relationshiptypes that are written + * This script is used to initialize the database with a set of relationship types that are written * in an xml file that is given to this script. - * This XML file needs to have a proper XML structure and needs to define the variables of the RelationshipType object + * This XML file needs to have a proper XML structure and needs to define the variables of the RelationshipType object. 
*/ public class InitializeEntities { private final static Logger log = LogManager.getLogger(); private final RelationshipTypeService relationshipTypeService; - private final RelationshipService relationshipService; private final EntityTypeService entityTypeService; private InitializeEntities() { relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService(); - relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); } @@ -111,14 +106,12 @@ private void parseXMLToRelations(Context context, String fileLocation) throws Au try { File fXmlFile = new File(fileLocation); DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = null; - dBuilder = dbFactory.newDocumentBuilder(); + DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); Document doc = dBuilder.parse(fXmlFile); doc.getDocumentElement().normalize(); NodeList nList = doc.getElementsByTagName("type"); - List relationshipTypes = new LinkedList<>(); for (int i = 0; i < nList.getLength(); i++) { Node nNode = nList.item(i); diff --git a/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java index 97f25cb2b213..514143c93ea0 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/app/util/OpenSearchServiceImpl.java @@ -16,10 +16,11 @@ import java.util.List; import java.util.Map; -import com.sun.syndication.feed.module.opensearch.OpenSearchModule; -import com.sun.syndication.feed.module.opensearch.entity.OSQuery; -import com.sun.syndication.feed.module.opensearch.impl.OpenSearchModuleImpl; -import com.sun.syndication.io.FeedException; +import com.rometools.modules.opensearch.OpenSearchModule; +import com.rometools.modules.opensearch.entity.OSQuery; +import 
com.rometools.modules.opensearch.impl.OpenSearchModuleImpl; +import com.rometools.rome.io.FeedException; +import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.app.util.service.OpenSearchService; import org.dspace.content.DSpaceObject; @@ -29,11 +30,11 @@ import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.output.DOMOutputter; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.output.DOMOutputter; +import org.jdom2.output.XMLOutputter; import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.Document; @@ -96,7 +97,7 @@ protected String getBaseSearchServiceURL() { * Get base search UI URL (websvc.opensearch.uicontext) */ protected String getBaseSearchUIURL() { - return configurationService.getProperty("dspace.server.url") + "/" + + return configurationService.getProperty("dspace.ui.url") + "/" + configurationService.getProperty("websvc.opensearch.uicontext"); } @@ -177,7 +178,9 @@ protected OpenSearchModule openSearchMarkup(String query, int totalResults, int OSQuery osq = new OSQuery(); osq.setRole("request"); try { - osq.setSearchTerms(URLEncoder.encode(query, "UTF-8")); + if (StringUtils.isNotBlank(query)) { + osq.setSearchTerms(URLEncoder.encode(query, "UTF-8")); + } } catch (UnsupportedEncodingException e) { log.error(e); } @@ -192,7 +195,7 @@ protected OpenSearchModule openSearchMarkup(String query, int totalResults, int * @param scope - null for the entire repository, or a collection/community handle * @return Service Document */ - protected org.jdom.Document getServiceDocument(String scope) { + protected org.jdom2.Document getServiceDocument(String scope) { ConfigurationService config = 
DSpaceServicesFactory.getInstance().getConfigurationService(); Namespace ns = Namespace.getNamespace(osNs); @@ -245,7 +248,7 @@ protected org.jdom.Document getServiceDocument(String scope) { url.setAttribute("template", template.toString()); root.addContent(url); } - return new org.jdom.Document(root); + return new org.jdom2.Document(root); } /** @@ -255,7 +258,7 @@ protected org.jdom.Document getServiceDocument(String scope) { * @return W3C Document object * @throws IOException if IO error */ - protected Document jDomToW3(org.jdom.Document jdomDoc) throws IOException { + protected Document jDomToW3(org.jdom2.Document jdomDoc) throws IOException { DOMOutputter domOut = new DOMOutputter(); try { return domOut.output(jdomDoc); diff --git a/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java b/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java index 1e018ff889ab..5dd286726d49 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java +++ b/dspace-api/src/main/java/org/dspace/app/util/OptimizeSelectCollection.java @@ -11,7 +11,6 @@ import java.util.ArrayList; import java.util.List; -import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -23,12 +22,12 @@ import org.springframework.util.StopWatch; /** + * A command line tool to verify/test the accuracy and speed gains of + * {@link Collection.findAuthorizedOptimized}. 
+ * Invocation: {@code dsrun org.dspace.app.util.OptimizeSelectCollection} * @author peterdietz - * A command line tool to verify/test the accuracy and speed gains of Collection.findAuthorizedOptimized() - * Invocation: dsrun org.dspace.app.util.OptimizeSelectCollection */ public class OptimizeSelectCollection { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(OptimizeSelectCollection.class); private static Context context; private static ArrayList brokenPeople; @@ -49,7 +48,7 @@ public static void main(String[] argv) throws Exception { "values as the legacy select-collection logic."); context = new Context(); - brokenPeople = new ArrayList(); + brokenPeople = new ArrayList<>(); int peopleChecked = 0; timeSavedMS = 0L; @@ -68,7 +67,7 @@ public static void main(String[] argv) throws Exception { } } - if (brokenPeople.size() > 0) { + if (!brokenPeople.isEmpty()) { System.out.println("NOT DONE YET!!! Some people don't have all their collections."); for (EPerson person : brokenPeople) { System.out.println("-- " + person.getEmail()); @@ -90,7 +89,7 @@ private static void checkSelectCollectionForUser(EPerson person) throws SQLExcep stopWatch.start("findAuthorized"); List collections = collectionService.findAuthorized(context, null, Constants.ADD); stopWatch.stop(); - Long defaultMS = stopWatch.getLastTaskTimeMillis(); + long defaultMS = stopWatch.getLastTaskTimeMillis(); stopWatch.start("ListingCollections"); System.out.println("Legacy Find Authorized"); @@ -100,7 +99,7 @@ private static void checkSelectCollectionForUser(EPerson person) throws SQLExcep stopWatch.start("findAuthorizedOptimized"); List collectionsOptimized = collectionService.findAuthorizedOptimized(context, Constants.ADD); stopWatch.stop(); - Long optimizedMS = stopWatch.getLastTaskTimeMillis(); + long optimizedMS = stopWatch.getLastTaskTimeMillis(); timeSavedMS += defaultMS - optimizedMS; diff --git a/dspace-api/src/main/java/org/dspace/app/util/RegexPatternUtils.java 
b/dspace-api/src/main/java/org/dspace/app/util/RegexPatternUtils.java new file mode 100644 index 000000000000..578e57fb0909 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/util/RegexPatternUtils.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import static java.util.regex.Pattern.CASE_INSENSITIVE; + +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import org.apache.commons.lang3.StringUtils; + +/** + * Utility class useful for check regex and patterns. + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class RegexPatternUtils { + + // checks input having the format /{pattern}/{flags} + // allowed flags are: g,i,m,s,u,y + public static final String REGEX_INPUT_VALIDATOR = "(/?)(.+)\\1([gimsuy]*)"; + // flags usable inside regex definition using format (?i|m|s|u|y) + public static final String REGEX_FLAGS = "(?%s)"; + public static final Pattern PATTERN_REGEX_INPUT_VALIDATOR = + Pattern.compile(REGEX_INPUT_VALIDATOR, CASE_INSENSITIVE); + + /** + * Computes a pattern starting from a regex definition with flags that + * uses the standard format: /{regex}/{flags} (ECMAScript format). + * This method can transform an ECMAScript regex into a java {@code Pattern} object + * wich can be used to validate strings. + *
    + * If regex is null, empty or blank a null {@code Pattern} will be retrieved + * If it's a valid regex, then a non-null {@code Pattern} will be retrieved, + * an exception will be thrown otherwise. + * + * @param regex with format /{regex}/{flags} + * @return {@code Pattern} regex pattern instance + * @throws PatternSyntaxException + */ + public static final Pattern computePattern(String regex) throws PatternSyntaxException { + if (StringUtils.isBlank(regex)) { + return null; + } + Matcher inputMatcher = PATTERN_REGEX_INPUT_VALIDATOR.matcher(regex); + String regexPattern = regex; + String regexFlags = ""; + if (inputMatcher.matches()) { + regexPattern = + Optional.of(inputMatcher.group(2)) + .filter(StringUtils::isNotBlank) + .orElse(regex); + regexFlags = + Optional.ofNullable(inputMatcher.group(3)) + .filter(StringUtils::isNotBlank) + .map(flags -> String.format(REGEX_FLAGS, flags)) + .orElse("") + .replaceAll("g", ""); + } + return Pattern.compile(regexFlags + regexPattern); + } + + private RegexPatternUtils() {} + +} diff --git a/dspace-api/src/main/java/org/dspace/app/util/RelationshipUtils.java b/dspace-api/src/main/java/org/dspace/app/util/RelationshipUtils.java new file mode 100644 index 000000000000..c63d2fdfdf73 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/util/RelationshipUtils.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import java.util.List; + +import org.dspace.content.RelationshipType; + +public class RelationshipUtils { + + private RelationshipUtils() { + } + + /** + * Matches two Entity types to a Relationship Type from a set of Relationship Types. 
+ * + * Given a list of Relationship Types, this method will find a Relationship Type that + * is configured between the originType and the targetType, with the matching originTypeName. + * It will match a relationship between these two entities in either direction (eg leftward + * or rightward). + * + * Example: originType = Author, targetType = Publication, originTypeName = isAuthorOfPublication. + * + * @param relTypes set of Relationship Types in which to find a match. + * @param targetType entity type of target (eg. Publication). + * @param originType entity type of origin referer (eg. Author). + * @param originTypeName the name of the relationship (eg. isAuthorOfPublication) + * @return null or matched Relationship Type. + */ + public static RelationshipType matchRelationshipType(List relTypes, String targetType, + String originType, String originTypeName) { + RelationshipType foundRelationshipType = null; + if (originTypeName.split("\\.").length > 1) { + originTypeName = originTypeName.split("\\.")[1]; + } + for (RelationshipType relationshipType : relTypes) { + // Is origin type leftward or righward + boolean isLeft = false; + if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType)) { + isLeft = true; + } + if (isLeft) { + // Validate typeName reference + if (!relationshipType.getLeftwardType().equalsIgnoreCase(originTypeName)) { + continue; + } + if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(originType) + && relationshipType.getRightType().getLabel().equalsIgnoreCase(targetType)) { + foundRelationshipType = relationshipType; + } + } else { + if (!relationshipType.getRightwardType().equalsIgnoreCase(originTypeName)) { + continue; + } + if (relationshipType.getLeftType().getLabel().equalsIgnoreCase(targetType) + && relationshipType.getRightType().getLabel().equalsIgnoreCase(originType)) { + foundRelationshipType = relationshipType; + } + } + } + return foundRelationshipType; + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 21208483583e..0f144fd69f46 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -22,7 +22,10 @@ import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -105,6 +108,13 @@ public class SubmissionConfigReader { */ private SubmissionConfig lastSubmissionConfig = null; + /** + * Collection Service instance, needed to interact with collection's + * stored data + */ + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + /** * Load Submission Configuration from the * item-submission.xml configuration file @@ -152,6 +162,9 @@ private void buildInputs(String fileName) throws SubmissionConfigReaderException } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -287,7 +300,7 @@ public SubmissionStepConfig getStepConfig(String stepID) * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. 
*/ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -334,18 +347,23 @@ private void doNodes(Node n) throws SAXException, SubmissionConfigReaderExceptio * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. */ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -355,7 +373,17 @@ private void processMap(Node e) throws SAXException { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } @@ -635,4 +663,4 @@ public List 
getCollectionsBySubmissionConfig(Context context, String } return results; } -} +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java index 7cd9dacd03c1..28d39d911b95 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java @@ -11,6 +11,9 @@ import java.util.Map; import org.apache.commons.lang3.BooleanUtils; +import org.dspace.content.InProgressSubmission; +import org.dspace.content.WorkspaceItem; +import org.hibernate.proxy.HibernateProxyHelper; /** * Class representing configuration for a single step within an Item Submission @@ -30,6 +33,7 @@ public class SubmissionStepConfig implements Serializable { public static final String INPUT_FORM_STEP_NAME = "submission-form"; public static final String UPLOAD_STEP_NAME = "upload"; + public static final String ACCESS_CONDITION_STEP_NAME = "accessCondition"; /* * The identifier for the Select Collection step @@ -172,6 +176,38 @@ public String getVisibilityOutside() { return visibilityOutside; } + /** + * Check if given submission section object is hidden for the current submission scope + * + * @param obj the InProgressSubmission to check + * @return true if the submission section is hidden, false otherwise + */ + public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) { + + String scopeToCheck = getScope(obj); + + if (scope == null || scopeToCheck == null) { + return false; + } + + String visibility = getVisibility(); + String visibilityOutside = getVisibilityOutside(); + + if (scope.equalsIgnoreCase(scopeToCheck)) { + return "hidden".equalsIgnoreCase(visibility); + } else { + return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside); + } + + } + + private String getScope(InProgressSubmission obj) { + if 
(HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) { + return "submission"; + } + return "workflow"; + } + /** * Get the number of this step in the current Submission process config. * Step numbers start with #0 (although step #0 is ALWAYS the special diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 2576df0193be..c1402499c444 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -15,26 +15,26 @@ import java.util.Map; import javax.servlet.http.HttpServletRequest; -import com.sun.syndication.feed.module.DCModule; -import com.sun.syndication.feed.module.DCModuleImpl; -import com.sun.syndication.feed.module.Module; -import com.sun.syndication.feed.module.itunes.EntryInformation; -import com.sun.syndication.feed.module.itunes.EntryInformationImpl; -import com.sun.syndication.feed.module.itunes.types.Duration; -import com.sun.syndication.feed.synd.SyndContent; -import com.sun.syndication.feed.synd.SyndContentImpl; -import com.sun.syndication.feed.synd.SyndEnclosure; -import com.sun.syndication.feed.synd.SyndEnclosureImpl; -import com.sun.syndication.feed.synd.SyndEntry; -import com.sun.syndication.feed.synd.SyndEntryImpl; -import com.sun.syndication.feed.synd.SyndFeed; -import com.sun.syndication.feed.synd.SyndFeedImpl; -import com.sun.syndication.feed.synd.SyndImage; -import com.sun.syndication.feed.synd.SyndImageImpl; -import com.sun.syndication.feed.synd.SyndPerson; -import com.sun.syndication.feed.synd.SyndPersonImpl; -import com.sun.syndication.io.FeedException; -import com.sun.syndication.io.SyndFeedOutput; +import com.rometools.modules.itunes.EntryInformation; +import com.rometools.modules.itunes.EntryInformationImpl; +import com.rometools.modules.itunes.types.Duration; +import com.rometools.rome.feed.module.DCModule; +import 
com.rometools.rome.feed.module.DCModuleImpl; +import com.rometools.rome.feed.module.Module; +import com.rometools.rome.feed.synd.SyndContent; +import com.rometools.rome.feed.synd.SyndContentImpl; +import com.rometools.rome.feed.synd.SyndEnclosure; +import com.rometools.rome.feed.synd.SyndEnclosureImpl; +import com.rometools.rome.feed.synd.SyndEntry; +import com.rometools.rome.feed.synd.SyndEntryImpl; +import com.rometools.rome.feed.synd.SyndFeed; +import com.rometools.rome.feed.synd.SyndFeedImpl; +import com.rometools.rome.feed.synd.SyndImage; +import com.rometools.rome.feed.synd.SyndImageImpl; +import com.rometools.rome.feed.synd.SyndPerson; +import com.rometools.rome.feed.synd.SyndPersonImpl; +import com.rometools.rome.io.FeedException; +import com.rometools.rome.io.SyndFeedOutput; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -51,6 +51,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.core.I18nUtil; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; @@ -91,6 +92,7 @@ public class SyndicationFeed { // default DC fields for entry protected String defaultTitleField = "dc.title"; + protected String defaultDescriptionField = "dc.description"; protected String defaultAuthorField = "dc.contributor.author"; protected String defaultDateField = "dc.date.issued"; private static final String[] defaultDescriptionFields = @@ -193,20 +195,18 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec String defaultTitle = null; boolean podcastFeed = false; this.request = request; - // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); - 
feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION)); + defaultDescriptionField = localize(labels, MSG_FEED_DESCRIPTION); objectURL = resolveURL(request, null); - logoURL = configurationService.getProperty("webui.feed.logo.url"); } else { Bitstream logo = null; if (dso instanceof IndexableCollection) { Collection col = ((IndexableCollection) dso).getIndexedObject(); defaultTitle = col.getName(); - feed.setDescription(collectionService.getMetadataFirstValue(col, - CollectionService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = collectionService.getMetadataFirstValue(col, + CollectionService.MD_SHORT_DESCRIPTION, Item.ANY); logo = col.getLogo(); String cols = configurationService.getProperty("webui.feed.podcast.collections"); if (cols != null && cols.length() > 1 && cols.contains(col.getHandle())) { @@ -216,8 +216,8 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } else if (dso instanceof IndexableCommunity) { Community comm = ((IndexableCommunity) dso).getIndexedObject(); defaultTitle = comm.getName(); - feed.setDescription(communityService.getMetadataFirstValue(comm, - CommunityService.MD_SHORT_DESCRIPTION, Item.ANY)); + defaultDescriptionField = communityService.getMetadataFirstValue(comm, + CommunityService.MD_SHORT_DESCRIPTION, Item.ANY); logo = comm.getLogo(); String comms = configurationService.getProperty("webui.feed.podcast.communities"); if (comms != null && comms.length() > 1 && comms.contains(comm.getHandle())) { @@ -232,6 +232,12 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } feed.setTitle(labels.containsKey(MSG_FEED_TITLE) ? 
localize(labels, MSG_FEED_TITLE) : defaultTitle); + + if (defaultDescriptionField == null || defaultDescriptionField == "") { + defaultDescriptionField = I18nUtil.getMessage("org.dspace.app.util.SyndicationFeed.no-description"); + } + + feed.setDescription(defaultDescriptionField); feed.setLink(objectURL); feed.setPublishedDate(new Date()); feed.setUri(objectURL); @@ -329,7 +335,8 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec dcDescriptionField != null) { DCModule dc = new DCModuleImpl(); if (dcCreatorField != null) { - List dcAuthors = itemService.getMetadataByMetadataString(item, dcCreatorField); + List dcAuthors = itemService + .getMetadataByMetadataString(item, dcCreatorField); if (dcAuthors.size() > 0) { List creators = new ArrayList<>(); for (MetadataValue author : dcAuthors) { @@ -345,7 +352,8 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec } } if (dcDescriptionField != null) { - List v = itemService.getMetadataByMetadataString(item, dcDescriptionField); + List v = itemService + .getMetadataByMetadataString(item, dcDescriptionField); if (v.size() > 0) { StringBuilder descs = new StringBuilder(); for (MetadataValue d : v) { @@ -376,6 +384,7 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec enc.setLength(bit.getSizeBytes()); enc.setUrl(urlOfBitstream(request, bit)); enclosures.add(enc); + } } } @@ -419,7 +428,7 @@ public void populate(HttpServletRequest request, Context context, IndexableObjec // with length of song in seconds if (extent != null && extent.length() > 0) { extent = extent.split(" ")[0]; - Integer duration = Integer.parseInt(extent); + long duration = Long.parseLong(extent); itunes.setDuration(new Duration(duration)); // } diff --git a/dspace-api/src/main/java/org/dspace/app/util/Util.java b/dspace-api/src/main/java/org/dspace/app/util/Util.java index aa04c13be7d4..f8ef3b1731f7 100644 --- 
a/dspace-api/src/main/java/org/dspace/app/util/Util.java +++ b/dspace-api/src/main/java/org/dspace/app/util/Util.java @@ -38,13 +38,12 @@ * * @author Robert Tansley * @author Mark Diggory - * @version $Revision$ */ public class Util { // cache for source version result private static String sourceVersion = null; - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Util.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); /** * Default constructor. Must be protected as org.dspace.xmlworkflow.WorkflowUtils extends it @@ -60,7 +59,7 @@ protected Util() { } * spaces */ public static String nonBreakSpace(String s) { - StringBuffer newString = new StringBuffer(); + StringBuilder newString = new StringBuilder(); for (int i = 0; i < s.length(); i++) { char ch = s.charAt(i); @@ -99,7 +98,7 @@ public static String encodeBitstreamName(String stringIn, String encoding) return ""; } - StringBuffer out = new StringBuffer(); + StringBuilder out = new StringBuilder(); final String[] pctEncoding = {"%00", "%01", "%02", "%03", "%04", "%05", "%06", "%07", "%08", "%09", "%0a", "%0b", "%0c", "%0d", @@ -263,7 +262,7 @@ public static List getUUIDParameters(HttpServletRequest request, return null; } - List return_values = new ArrayList(request_values.length); + List return_values = new ArrayList<>(request_values.length); for (String s : request_values) { try { @@ -402,7 +401,7 @@ public static List getControlledVocabulariesDisplayValueLocalized( Item item, List values, String schema, String element, String qualifier, Locale locale) throws SQLException, DCInputsReaderException { - List toReturn = new ArrayList(); + List toReturn = new ArrayList<>(); DCInput myInputs = null; boolean myInputsFound = false; String formFileName = I18nUtil.getInputFormsFileName(locale); @@ -478,8 +477,9 @@ public static List getControlledVocabulariesDisplayValueLocalized( } /** - * Split a list in an array of i sub-lists uniformly sized + * Split a 
list in an array of i sub-lists uniformly sized. * + * @param type of objects in the list. * @param idsList the list to split * @param i the number of sublists to return * diff --git a/dspace-api/src/main/java/org/dspace/app/util/WebApp.java b/dspace-api/src/main/java/org/dspace/app/util/WebApp.java index 7cd2bd8fea1e..2f42c1459f63 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/WebApp.java +++ b/dspace-api/src/main/java/org/dspace/app/util/WebApp.java @@ -58,6 +58,7 @@ protected WebApp() { } + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/app/util/XMLUtils.java b/dspace-api/src/main/java/org/dspace/app/util/XMLUtils.java index 884b2a6a92b7..c39d0d26fd5e 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/XMLUtils.java +++ b/dspace-api/src/main/java/org/dspace/app/util/XMLUtils.java @@ -8,7 +8,6 @@ package org.dspace.app.util; import java.util.ArrayList; -import java.util.LinkedList; import java.util.List; import org.apache.commons.lang3.StringUtils; @@ -29,13 +28,13 @@ private XMLUtils() { } /** * @param dataRoot the starting node - * @param name the name of the subelement to find + * @param name the tag name of the child element to find. * @return the list of all DOM Element with the provided name direct child * of the starting node */ public static List getElementList(Element dataRoot, String name) { NodeList list = dataRoot.getElementsByTagName(name); - List listElements = new ArrayList(); + List listElements = new ArrayList<>(); for (int i = 0; i < list.getLength(); i++) { Element item = (Element) list.item(i); if (item.getParentNode().equals(dataRoot)) { @@ -105,7 +104,7 @@ public static Element getSingleElement(Element dataRoot, String name) { /** * @param rootElement the starting node - * @param subElementName the name of the subelement to find + * @param subElementName the tag name of the child element to find. 
* @return a list of string including all the text contents of the sub * element with the specified name. If there are not sub element * with the supplied name the method will return null @@ -121,7 +120,7 @@ public static List getElementValueList(Element rootElement, return null; } - List result = new LinkedList(); + List result = new ArrayList<>(); for (Element el : subElements) { if (StringUtils.isNotBlank(el.getTextContent())) { result.add(el.getTextContent().trim()); @@ -152,7 +151,7 @@ public static List getElementValueArrayList(Element rootElement, return null; } - List result = new LinkedList(); + List result = new ArrayList<>(); for (Element el : subElements) { String[] tmp = new String[fieldsName.length]; for (int idx = 0; idx < fieldsName.length; idx++) { diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java index 12c925b4850a..500ee04a979b 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationMethod.java @@ -153,6 +153,22 @@ public boolean allowSetPassword(Context context, public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException; + /** + * Returns true if the special groups returned by + * {@link org.dspace.authenticate.AuthenticationMethod#getSpecialGroups(Context, HttpServletRequest)} + * should be implicitly be added to the groups related to the current user. By + * default this is true if the authentication method is the actual + * authentication mechanism used by the user. + * @param context A valid DSpace context. + * @param request The request that started this operation, or null if not + * applicable. 
+ * @return true is the special groups must be considered, false + * otherwise + */ + public default boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return getName().equals(context.getAuthenticationMethod()); + } + /** * Authenticate the given or implicit credentials. * This is the heart of the authentication method: test the @@ -216,4 +232,23 @@ public String loginPageURL(Context context, * @return The authentication method name */ public String getName(); + + /** + * Get whether the authentication method is being used. + * @param context The DSpace context + * @param request The current request + * @return whether the authentication method is being used. + */ + public boolean isUsed(Context context, HttpServletRequest request); + + /** + * Check if the given current password is valid to change the password of the + * given ePerson + * @param context The DSpace context + * @param ePerson the ePerson related to the password change + * @param currentPassword The current password to check + * @return true if the provided password matches with current + * password + */ + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword); } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java index eb5e0a03f9c2..1d67da37ecb3 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/AuthenticationServiceImpl.java @@ -179,10 +179,15 @@ public List getSpecialGroups(Context context, int totalLen = 0; for (AuthenticationMethod method : getAuthenticationMethodStack()) { - List gl = method.getSpecialGroups(context, request); - if (gl.size() > 0) { - result.addAll(gl); - totalLen += gl.size(); + + if (method.areSpecialGroupsApplicable(context, request)) { + + List gl = method.getSpecialGroups(context, 
request); + if (gl.size() > 0) { + result.addAll(gl); + totalLen += gl.size(); + } + } } @@ -193,4 +198,30 @@ public List getSpecialGroups(Context context, public Iterator authenticationMethodIterator() { return getAuthenticationMethodStack().iterator(); } + + @Override + public String getAuthenticationMethod(final Context context, final HttpServletRequest request) { + final Iterator authenticationMethodIterator = authenticationMethodIterator(); + + while (authenticationMethodIterator.hasNext()) { + final AuthenticationMethod authenticationMethod = authenticationMethodIterator.next(); + if (authenticationMethod.isUsed(context, request)) { + return authenticationMethod.getName(); + } + } + + return null; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + + for (AuthenticationMethod method : getAuthenticationMethodStack()) { + if (method.getName().equals(context.getAuthenticationMethod())) { + return method.canChangePassword(context, ePerson, currentPassword); + } + } + + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java index e18c4eddd73c..0c2be211a532 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/IPAuthentication.java @@ -19,7 +19,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; @@ -52,11 +52,6 @@ public class IPAuthentication implements AuthenticationMethod { */ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(IPAuthentication.class); - /** - * Whether to look for x-forwarded headers for logging IP addresses - */ - protected static Boolean 
useProxies; - /** * All the IP matchers */ @@ -194,7 +189,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) groups.add(group); } else { - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "configuration_error", "unknown_group=" + groupName)); } @@ -202,7 +197,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) } } } catch (IPMatcherException ipme) { - log.warn(LogManager.getHeader(context, "configuration_error", + log.warn(LogHelper.getHeader(context, "configuration_error", "bad_ip=" + addr), ipme); } } @@ -228,7 +223,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) groups.remove(group); } else { - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "configuration_error", "unknown_group=" + groupName)); } @@ -236,7 +231,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) } } } catch (IPMatcherException ipme) { - log.warn(LogManager.getHeader(context, "configuration_error", + log.warn(LogHelper.getHeader(context, "configuration_error", "bad_ip=" + addr), ipme); } } @@ -248,15 +243,20 @@ public List getSpecialGroups(Context context, HttpServletRequest request) gsb.append(group.getID()).append(", "); } - log.debug(LogManager.getHeader(context, "authenticated", + log.debug(LogHelper.getHeader(context, "authenticated", "special_groups=" + gsb.toString() - + " (by IP=" + addr + ", useProxies=" + useProxies.toString() + ")" + + " (by IP=" + addr + ")" )); } return groups; } + @Override + public boolean areSpecialGroupsApplicable(Context context, HttpServletRequest request) { + return true; + } + @Override public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) throws SQLException { @@ -273,4 +273,14 @@ public String loginPageURL(Context context, HttpServletRequest request, public String getName() { return "ip"; } + + @Override + public boolean 
isUsed(final Context context, final HttpServletRequest request) { + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index 55f416031555..585eaf9cd8b1 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -11,9 +11,11 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Hashtable; +import java.util.Iterator; import java.util.List; import javax.naming.NamingEnumeration; import javax.naming.NamingException; @@ -36,7 +38,7 @@ import org.dspace.authenticate.service.AuthenticationService; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; @@ -64,6 +66,7 @@ * @author Reuben Pasquini * @author Samuel Ottenhoff * @author Ivan Masár + * @author Michael Plate */ public class LDAPAuthentication implements AuthenticationMethod { @@ -83,6 +86,9 @@ public class LDAPAuthentication protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final String LDAP_AUTHENTICATED = "ldap.authenticated"; + + /** * Let a real auth method return true if it wants. * @@ -156,7 +162,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) Group ldapGroup = groupService.findByName(context, groupName); if (ldapGroup == null) { // Oops - the group isn't there. 
- log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "ldap_specialgroup", "Group defined in login.specialgroup does not exist")); return Collections.EMPTY_LIST; @@ -211,7 +217,7 @@ public int authenticate(Context context, String realm, HttpServletRequest request) throws SQLException { - log.info(LogManager.getHeader(context, "auth", "attempting trivial auth of user=" + netid)); + log.info(LogHelper.getHeader(context, "auth", "attempting trivial auth of user=" + netid)); // Skip out when no netid or password is given. if (netid == null || password == null) { @@ -245,7 +251,7 @@ public int authenticate(Context context, // Check a DN was found if ((dn == null) || (dn.trim().equals(""))) { - log.info(LogManager + log.info(LogHelper .getHeader(context, "failed_login", "no DN found for user " + netid)); return BAD_CREDENTIALS; } @@ -261,11 +267,12 @@ public int authenticate(Context context, if (ldap.ldapAuthenticate(dn, password, context)) { context.setCurrentUser(eperson); + request.setAttribute(LDAP_AUTHENTICATED, true); // assign user to groups based on ldap dn assignGroups(dn, ldap.ldapGroup, context); - log.info(LogManager + log.info(LogHelper .getHeader(context, "authenticate", "type=ldap")); return SUCCESS; } else { @@ -277,7 +284,7 @@ public int authenticate(Context context, if (ldap.ldapAuthenticate(dn, password, context)) { // Register the new user automatically - log.info(LogManager.getHeader(context, + log.info(LogHelper.getHeader(context, "autoregister", "netid=" + netid)); String email = ldap.ldapEmail; @@ -290,7 +297,7 @@ public int authenticate(Context context, email = netid + configurationService.getProperty("authentication-ldap.netid_email_domain"); } else { // We don't have a valid email address. 
We'll default it to 'netid' but log a warning - log.warn(LogManager.getHeader(context, "autoregister", + log.warn(LogHelper.getHeader(context, "autoregister", "Unable to locate email address for account '" + netid + "', so" + " it has been set to '" + netid + "'. " + "Please check the LDAP 'email_field' OR consider " + @@ -303,7 +310,7 @@ public int authenticate(Context context, try { eperson = ePersonService.findByEmail(context, email); if (eperson != null) { - log.info(LogManager.getHeader(context, + log.info(LogHelper.getHeader(context, "type=ldap-login", "type=ldap_but_already_email")); context.turnOffAuthorisationSystem(); eperson.setNetid(netid.toLowerCase()); @@ -311,6 +318,8 @@ public int authenticate(Context context, context.dispatchEvents(); context.restoreAuthSystemState(); context.setCurrentUser(eperson); + request.setAttribute(LDAP_AUTHENTICATED, true); + // assign user to groups based on ldap dn assignGroups(dn, ldap.ldapGroup, context); @@ -341,6 +350,8 @@ public int authenticate(Context context, ePersonService.update(context, eperson); context.dispatchEvents(); context.setCurrentUser(eperson); + request.setAttribute(LDAP_AUTHENTICATED, true); + // assign user to groups based on ldap dn assignGroups(dn, ldap.ldapGroup, context); @@ -350,12 +361,12 @@ public int authenticate(Context context, context.restoreAuthSystemState(); } - log.info(LogManager.getHeader(context, "authenticate", + log.info(LogHelper.getHeader(context, "authenticate", "type=ldap-login, created ePerson")); return SUCCESS; } else { // No auto-registration for valid certs - log.info(LogManager.getHeader(context, + log.info(LogHelper.getHeader(context, "failed_login", "type=ldap_but_no_record")); return NO_SUCH_USER; } @@ -383,7 +394,7 @@ private static class SpeakerToLDAP { protected String ldapGivenName = null; protected String ldapSurname = null; protected String ldapPhone = null; - protected String ldapGroup = null; + protected ArrayList ldapGroup = null; /** * LDAP settings @@ 
-398,9 +409,9 @@ private static class SpeakerToLDAP { final String ldap_surname_field; final String ldap_phone_field; final String ldap_group_field; - final boolean useTLS; + SpeakerToLDAP(Logger thelog) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -429,7 +440,7 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con } catch (NumberFormatException e) { // Log the error if it has been set but is invalid if (ldap_search_scope != null) { - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "ldap_authentication", "invalid search scope: " + ldap_search_scope)); } } @@ -483,6 +494,8 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con try { SearchControls ctrls = new SearchControls(); ctrls.setSearchScope(ldap_search_scope_value); + // Fetch both user attributes '*' (eg. uid, cn) and operational attributes '+' (eg. memberOf) + ctrls.setReturningAttributes(new String[] {"*", "+"}); String searchName; if (useTLS) { @@ -539,7 +552,11 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con if (attlist[4] != null) { att = atts.get(attlist[4]); if (att != null) { - ldapGroup = (String) att.get(); + // loop through all groups returned by LDAP + ldapGroup = new ArrayList(); + for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) { + ldapGroup.add((String) val.next()); + } } } @@ -548,19 +565,19 @@ protected String getDNOfUser(String adminUser, String adminPassword, Context con // Ambiguous user, can't continue } else { - log.debug(LogManager.getHeader(context, "got DN", resultDN)); + log.debug(LogHelper.getHeader(context, "got DN", resultDN)); return resultDN; } } } catch (NamingException e) { // if the lookup fails go ahead and create a new record for them because the authentication // succeeded - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, 
"ldap_attribute_lookup", "type=failed_search " + e)); } } catch (NamingException | IOException e) { - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "ldap_authentication", "type=failed_auth " + e)); } finally { // Close the context when we're done @@ -630,7 +647,7 @@ protected boolean ldapAuthenticate(String netid, String password, } } catch (NamingException | IOException e) { // something went wrong (like wrong password) so return false - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "ldap_authentication", "type=failed_auth " + e)); return false; } finally { @@ -685,15 +702,26 @@ public String getName() { /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. + * + * @param dn + * The string containing distinguished name of the user + * + * @param group + * List of strings with LDAP dn of groups + * + * @param context + * DSpace context */ - private void assignGroups(String dn, String group, Context context) { + private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); - int i = 1; - String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); - + int groupmapIndex = 1; + String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." 
+ groupmapIndex); boolean cmp; + + // groupmap contains the mapping of LDAP groups to DSpace groups + // outer loop with the DSpace groups while (groupMap != null) { String t[] = groupMap.split(":"); String ldapSearchString = t[0]; @@ -701,37 +729,85 @@ private void assignGroups(String dn, String group, Context context) { if (group == null) { cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + + if (cmp) { + assignGroup(context, groupmapIndex, dspaceGroupName); + } } else { - cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString); - } + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); } else { - // The group does not exist - log.warn(LogManager.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." + i - + " does not exist :: " + dspaceGroupName)); + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + assignGroup(context, groupmapIndex, dspaceGroupName); } - } catch (AuthorizeException ae) { - log.debug(LogManager.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogManager.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); } } - groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." 
+ ++i); + groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++groupmapIndex); } } } + + /** + * Add the current authenticated user to the specified group + * + * @param context + * DSpace context + * + * @param groupmapIndex + * authentication-ldap.login.groupmap.* key index defined in dspace.cfg + * + * @param dspaceGroupName + * The DSpace group to add the user to + */ + private void assignGroup(Context context, int groupmapIndex, String dspaceGroupName) { + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." + groupmapIndex + + " does not exist :: " + dspaceGroupName)); + } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); + } + } + + @Override + public boolean isUsed(final Context context, final HttpServletRequest request) { + if (request != null && + context.getCurrentUser() != null && + request.getAttribute(LDAP_AUTHENTICATED) != null) { + return true; + } + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java new file mode 100644 index 000000000000..5d4635d48ef5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthentication.java @@ -0,0 +1,94 @@ +/** + * 
The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate; + +import java.sql.SQLException; +import java.util.List; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.kernel.ServiceManager; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link AuthenticationMethod} that delegate all the method + * invocations to the bean of class {@link OrcidAuthenticationBean}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OidcAuthentication implements AuthenticationMethod { + + private final ServiceManager serviceManager = new DSpace().getServiceManager(); + + private static final String OIDC_AUTHENTICATED = "oidc.authenticated"; + + @Override + public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException { + return getOidcAuthentication().canSelfRegister(context, request, username); + } + + @Override + public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException { + getOidcAuthentication().initEPerson(context, request, eperson); + } + + @Override + public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException { + return getOidcAuthentication().allowSetPassword(context, request, username); + } + + @Override + public boolean isImplicit() { + return getOidcAuthentication().isImplicit(); + } + + @Override + public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException { + return getOidcAuthentication().getSpecialGroups(context, request); + } + + @Override + public int authenticate(Context 
context, String username, String password, String realm, HttpServletRequest request) + throws SQLException { + return getOidcAuthentication().authenticate(context, username, password, realm, request); + } + + @Override + public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) { + return getOidcAuthentication().loginPageURL(context, request, response); + } + + @Override + public String getName() { + return getOidcAuthentication().getName(); + } + + private OidcAuthenticationBean getOidcAuthentication() { + return serviceManager.getServiceByName("oidcAuthentication", OidcAuthenticationBean.class); + } + + @Override + public boolean isUsed(final Context context, final HttpServletRequest request) { + if (request != null && + context.getCurrentUser() != null && + request.getAttribute(OIDC_AUTHENTICATED) != null) { + return true; + } + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java new file mode 100644 index 000000000000..8a4ac190c816 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/OidcAuthenticationBean.java @@ -0,0 +1,302 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate; + + +import static java.lang.String.format; +import static java.net.URLEncoder.encode; +import static org.apache.commons.lang.BooleanUtils.toBoolean; +import static org.apache.commons.lang3.StringUtils.isAnyBlank; +import static org.apache.commons.lang3.StringUtils.isBlank; + +import java.io.UnsupportedEncodingException; +import java.sql.SQLException; +import 
java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.authenticate.oidc.OidcClient; +import org.dspace.authenticate.oidc.model.OidcTokenResponseDTO; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * OpenID Connect Authentication for DSpace. + * + * This implementation doesn't allow/needs to register user, which may be holder + * by the openID authentication server. + * + * @link https://openid.net/developers/specs/ + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public class OidcAuthenticationBean implements AuthenticationMethod { + + public static final String OIDC_AUTH_ATTRIBUTE = "oidc"; + + private final static String LOGIN_PAGE_URL_FORMAT = "%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s"; + + private static final Logger LOGGER = LoggerFactory.getLogger(OidcAuthenticationBean.class); + + private static final String OIDC_AUTHENTICATED = "oidc.authenticated"; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private OidcClient oidcClient; + + @Autowired + private EPersonService ePersonService; + + @Override + public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException { + return false; + } + + @Override + public boolean isImplicit() { + return false; + } + + @Override + public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException { + return canSelfRegister(); + } + + @Override + public void 
initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException { + } + + @Override + public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException { + return List.of(); + } + + @Override + public String getName() { + return OIDC_AUTH_ATTRIBUTE; + } + + @Override + public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) + throws SQLException { + + if (request == null) { + LOGGER.warn("Unable to authenticate using OIDC because the request object is null."); + return BAD_ARGS; + } + + if (request.getAttribute(OIDC_AUTH_ATTRIBUTE) == null) { + return NO_SUCH_USER; + } + + String code = (String) request.getParameter("code"); + if (StringUtils.isEmpty(code)) { + LOGGER.warn("The incoming request has not code parameter"); + return NO_SUCH_USER; + } + + return authenticateWithOidc(context, code, request); + } + + private int authenticateWithOidc(Context context, String code, HttpServletRequest request) throws SQLException { + + OidcTokenResponseDTO accessToken = getOidcAccessToken(code); + if (accessToken == null) { + LOGGER.warn("No access token retrieved by code"); + return NO_SUCH_USER; + } + + Map userInfo = getOidcUserInfo(accessToken.getAccessToken()); + + String email = getAttributeAsString(userInfo, getEmailAttribute()); + if (StringUtils.isBlank(email)) { + LOGGER.warn("No email found in the user info attributes"); + return NO_SUCH_USER; + } + + EPerson ePerson = ePersonService.findByEmail(context, email); + if (ePerson != null) { + request.setAttribute(OIDC_AUTHENTICATED, true); + return ePerson.canLogIn() ? logInEPerson(context, ePerson) : BAD_ARGS; + } + + // if self registration is disabled, warn about this failure to find a matching eperson + if (! canSelfRegister()) { + LOGGER.warn("Self registration is currently disabled for OIDC, and no ePerson could be found for email: {}", + email); + } + + return canSelfRegister() ? 
registerNewEPerson(context, userInfo, email) : NO_SUCH_USER; + } + + @Override + public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) { + + String authorizeUrl = configurationService.getProperty("authentication-oidc.authorize-endpoint"); + String clientId = configurationService.getProperty("authentication-oidc.client-id"); + String clientSecret = configurationService.getProperty("authentication-oidc.client-secret"); + String redirectUri = configurationService.getProperty("authentication-oidc.redirect-url"); + String tokenUrl = configurationService.getProperty("authentication-oidc.token-endpoint"); + String userInfoUrl = configurationService.getProperty("authentication-oidc.user-info-endpoint"); + String[] defaultScopes = + new String[] { + "openid", "email", "profile" + }; + String scopes = String.join(" ", configurationService.getArrayProperty("authentication-oidc.scopes", + defaultScopes)); + + if (isAnyBlank(authorizeUrl, clientId, redirectUri, clientSecret, tokenUrl, userInfoUrl)) { + LOGGER.error("Missing mandatory configuration properties for OidcAuthenticationBean"); + + // prepare a Map of the properties which can not have sane defaults, but are still required + final Map map = Map.of("authorizeUrl", authorizeUrl, "clientId", clientId, "redirectUri", + redirectUri, "clientSecret", clientSecret, "tokenUrl", tokenUrl, "userInfoUrl", userInfoUrl); + final Iterator> iterator = map.entrySet().iterator(); + + while (iterator.hasNext()) { + final Entry entry = iterator.next(); + + if (isBlank(entry.getValue())) { + LOGGER.error(" * {} is missing", entry.getKey()); + } + } + return ""; + } + + try { + return format(LOGIN_PAGE_URL_FORMAT, authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8")); + } catch (UnsupportedEncodingException e) { + LOGGER.error(e.getMessage(), e); + return ""; + } + + } + + private int logInEPerson(Context context, EPerson ePerson) { + context.setCurrentUser(ePerson); + return SUCCESS; 
+ } + + private int registerNewEPerson(Context context, Map userInfo, String email) throws SQLException { + try { + + context.turnOffAuthorisationSystem(); + + EPerson eperson = ePersonService.create(context); + + eperson.setNetid(email); + eperson.setEmail(email); + + String firstName = getAttributeAsString(userInfo, getFirstNameAttribute()); + if (firstName != null) { + eperson.setFirstName(context, firstName); + } + + String lastName = getAttributeAsString(userInfo, getLastNameAttribute()); + if (lastName != null) { + eperson.setLastName(context, lastName); + } + + eperson.setCanLogIn(true); + eperson.setSelfRegistered(true); + + ePersonService.update(context, eperson); + context.setCurrentUser(eperson); + context.dispatchEvents(); + + return SUCCESS; + + } catch (Exception ex) { + LOGGER.error("An error occurs registering a new EPerson from OIDC", ex); + return NO_SUCH_USER; + } finally { + context.restoreAuthSystemState(); + } + } + + private OidcTokenResponseDTO getOidcAccessToken(String code) { + try { + return oidcClient.getAccessToken(code); + } catch (Exception ex) { + LOGGER.error("An error occurs retriving the OIDC access_token", ex); + return null; + } + } + + private Map getOidcUserInfo(String accessToken) { + try { + return oidcClient.getUserInfo(accessToken); + } catch (Exception ex) { + LOGGER.error("An error occurs retriving the OIDC user info", ex); + return Map.of(); + } + } + + private String getAttributeAsString(Map userInfo, String attribute) { + if (isBlank(attribute)) { + return null; + } + return userInfo.containsKey(attribute) ? 
String.valueOf(userInfo.get(attribute)) : null; + } + + private String getEmailAttribute() { + return configurationService.getProperty("authentication-oidc.user-info.email", "email"); + } + + private String getFirstNameAttribute() { + return configurationService.getProperty("authentication-oidc.user-info.first-name", "given_name"); + } + + private String getLastNameAttribute() { + return configurationService.getProperty("authentication-oidc.user-info.last-name", "family_name"); + } + + private boolean canSelfRegister() { + String canSelfRegister = configurationService.getProperty("authentication-oidc.can-self-register", "true"); + if (isBlank(canSelfRegister)) { + return true; + } + return toBoolean(canSelfRegister); + } + + public OidcClient getOidcClient() { + return this.oidcClient; + } + + public void setOidcClient(OidcClient oidcClient) { + this.oidcClient = oidcClient; + } + + @Override + public boolean isUsed(final Context context, final HttpServletRequest request) { + if (request != null && + context.getCurrentUser() != null && + request.getAttribute(OIDC_AUTHENTICATED) != null) { + return true; + } + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthentication.java new file mode 100644 index 000000000000..3e9ff6638a61 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthentication.java @@ -0,0 +1,109 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate; + +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import javax.servlet.http.HttpServletRequest; 
+import javax.servlet.http.HttpServletResponse; + +import org.dspace.authenticate.factory.AuthenticateServiceFactory; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.kernel.ServiceManager; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link AuthenticationMethod} that delegate all the method + * invocations to the bean of class {@link OrcidAuthenticationBean}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidAuthentication implements AuthenticationMethod { + + private final ServiceManager serviceManager = new DSpace().getServiceManager(); + + /** + * Check if OrcidAuthentication plugin is enabled + * @return true if enabled, false otherwise + */ + public static boolean isEnabled() { + + String pluginName = new OrcidAuthentication().getName(); + + Iterator authenticationMethodIterator = AuthenticateServiceFactory.getInstance() + .getAuthenticationService().authenticationMethodIterator(); + + while (authenticationMethodIterator.hasNext()) { + if (pluginName.equals(authenticationMethodIterator.next().getName())) { + return true; + } + } + + return false; + } + + @Override + public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException { + return getOrcidAuthentication().canSelfRegister(context, request, username); + } + + @Override + public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException { + getOrcidAuthentication().initEPerson(context, request, eperson); + } + + @Override + public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException { + return getOrcidAuthentication().allowSetPassword(context, request, username); + } + + @Override + public boolean isImplicit() { + return getOrcidAuthentication().isImplicit(); + } + + @Override + public List getSpecialGroups(Context context, 
HttpServletRequest request) throws SQLException { + return getOrcidAuthentication().getSpecialGroups(context, request); + } + + @Override + public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) + throws SQLException { + return getOrcidAuthentication().authenticate(context, username, password, realm, request); + } + + @Override + public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) { + return getOrcidAuthentication().loginPageURL(context, request, response); + } + + @Override + public String getName() { + return getOrcidAuthentication().getName(); + } + + private OrcidAuthenticationBean getOrcidAuthentication() { + return serviceManager.getServiceByName("orcidAuthentication", OrcidAuthenticationBean.class); + } + + @Override + public boolean isUsed(Context context, HttpServletRequest request) { + return getOrcidAuthentication().isUsed(context, request); + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java new file mode 100644 index 000000000000..a11bbfc867b4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/OrcidAuthenticationBean.java @@ -0,0 +1,335 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate; + +import static java.lang.String.format; +import static java.net.URLEncoder.encode; +import static org.apache.commons.lang.BooleanUtils.toBoolean; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.content.Item.ANY; + +import 
java.io.UnsupportedEncodingException; +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.EPersonService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.ResearcherProfile; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.services.ConfigurationService; +import org.orcid.jaxb.model.v3.release.record.Email; +import org.orcid.jaxb.model.v3.release.record.Person; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * ORCID authentication for DSpace. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidAuthenticationBean implements AuthenticationMethod { + + public static final String ORCID_AUTH_ATTRIBUTE = "orcid-authentication"; + + private final static Logger LOGGER = LoggerFactory.getLogger(OrcidAuthenticationBean.class); + + private final static String LOGIN_PAGE_URL_FORMAT = "%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s"; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request) + throws SQLException { + + if (request == null) { + LOGGER.warn("Unable to authenticate using ORCID because the request object is null."); + return BAD_ARGS; + } + + String code = (String) request.getParameter("code"); + if (StringUtils.isEmpty(code)) { + LOGGER.warn("The incoming request has not code parameter"); + return NO_SUCH_USER; + } + request.setAttribute(ORCID_AUTH_ATTRIBUTE, true); + return authenticateWithOrcid(context, code, request); + } + + @Override + public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) { + + String authorizeUrl = orcidConfiguration.getAuthorizeEndpointUrl(); + String clientId = orcidConfiguration.getClientId(); + String redirectUri = orcidConfiguration.getRedirectUrl(); + String scopes = String.join("+", orcidConfiguration.getScopes()); + + if (StringUtils.isAnyBlank(authorizeUrl, clientId, redirectUri, scopes)) { + LOGGER.error("Missing mandatory configuration 
properties for OrcidAuthentication"); + return ""; + } + + try { + return format(LOGIN_PAGE_URL_FORMAT, authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8")); + } catch (UnsupportedEncodingException e) { + LOGGER.error(e.getMessage(), e); + return ""; + } + + } + + @Override + public boolean isUsed(Context context, HttpServletRequest request) { + return request.getAttribute(ORCID_AUTH_ATTRIBUTE) != null; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } + + @Override + public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException { + return canSelfRegister(); + } + + @Override + public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException { + + } + + @Override + public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException { + return false; + } + + @Override + public boolean isImplicit() { + return false; + } + + @Override + public List getSpecialGroups(Context context, HttpServletRequest request) throws SQLException { + return Collections.emptyList(); + } + + @Override + public String getName() { + return "orcid"; + } + + private int authenticateWithOrcid(Context context, String code, HttpServletRequest request) throws SQLException { + OrcidTokenResponseDTO token = getOrcidAccessToken(code); + if (token == null) { + return NO_SUCH_USER; + } + + String orcid = token.getOrcid(); + + EPerson ePerson = ePersonService.findByNetid(context, orcid); + if (ePerson != null) { + return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS; + } + + Person person = getPersonFromOrcid(token); + if (person == null) { + return NO_SUCH_USER; + } + + String email = getEmail(person).orElse(null); + + ePerson = ePersonService.findByEmail(context, email); + if (ePerson != null) { + return ePerson.canLogIn() ? 
logInEPerson(context, token, ePerson) : BAD_ARGS; + } + + return canSelfRegister() ? registerNewEPerson(context, person, token) : NO_SUCH_USER; + + } + + private int logInEPerson(Context context, OrcidTokenResponseDTO token, EPerson ePerson) + throws SQLException { + + context.setCurrentUser(ePerson); + + setOrcidMetadataOnEPerson(context, ePerson, token); + + ResearcherProfile profile = findProfile(context, ePerson); + if (profile != null) { + orcidSynchronizationService.linkProfile(context, profile.getItem(), token); + } + + return SUCCESS; + + } + + private ResearcherProfile findProfile(Context context, EPerson ePerson) throws SQLException { + try { + return researcherProfileService.findById(context, ePerson.getID()); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } + } + + private int registerNewEPerson(Context context, Person person, OrcidTokenResponseDTO token) throws SQLException { + + try { + context.turnOffAuthorisationSystem(); + + String email = getEmail(person) + .orElseThrow(() -> new IllegalStateException("The email is configured private on orcid")); + + String orcid = token.getOrcid(); + + EPerson eperson = ePersonService.create(context); + + eperson.setNetid(orcid); + + eperson.setEmail(email); + + Optional firstName = getFirstName(person); + if (firstName.isPresent()) { + eperson.setFirstName(context, firstName.get()); + } + + Optional lastName = getLastName(person); + if (lastName.isPresent()) { + eperson.setLastName(context, lastName.get()); + } + eperson.setCanLogIn(true); + eperson.setSelfRegistered(true); + + setOrcidMetadataOnEPerson(context, eperson, token); + + ePersonService.update(context, eperson); + context.setCurrentUser(eperson); + context.dispatchEvents(); + + return SUCCESS; + + } catch (Exception ex) { + LOGGER.error("An error occurs registering a new EPerson from ORCID", ex); + context.rollback(); + return NO_SUCH_USER; + } finally { + context.restoreAuthSystemState(); + } + } + + private void 
setOrcidMetadataOnEPerson(Context context, EPerson person, OrcidTokenResponseDTO token) + throws SQLException { + + String orcid = token.getOrcid(); + String accessToken = token.getAccessToken(); + String[] scopes = token.getScopeAsArray(); + + ePersonService.setMetadataSingleValue(context, person, "eperson", "orcid", null, null, orcid); + ePersonService.clearMetadata(context, person, "eperson", "orcid", "scope", ANY); + for (String scope : scopes) { + ePersonService.addMetadata(context, person, "eperson", "orcid", "scope", null, scope); + } + + OrcidToken orcidToken = orcidTokenService.findByEPerson(context, person); + if (orcidToken == null) { + orcidTokenService.create(context, person, accessToken); + } else { + orcidToken.setAccessToken(accessToken); + } + + } + + private Person getPersonFromOrcid(OrcidTokenResponseDTO token) { + try { + return orcidClient.getPerson(token.getAccessToken(), token.getOrcid()); + } catch (Exception ex) { + LOGGER.error("An error occurs retriving the ORCID record with id " + token.getOrcid(), ex); + return null; + } + } + + private Optional getEmail(Person person) { + List emails = person.getEmails() != null ? 
person.getEmails().getEmails() : Collections.emptyList(); + if (CollectionUtils.isEmpty(emails)) { + return Optional.empty(); + } + return Optional.ofNullable(emails.get(0).getEmail()); + } + + private Optional getFirstName(Person person) { + return Optional.ofNullable(person.getName()) + .map(name -> name.getGivenNames()) + .map(givenNames -> givenNames.getContent()); + } + + private Optional getLastName(Person person) { + return Optional.ofNullable(person.getName()) + .map(name -> name.getFamilyName()) + .map(givenNames -> givenNames.getContent()); + } + + private boolean canSelfRegister() { + String canSelfRegister = configurationService.getProperty("authentication-orcid.can-self-register", "true"); + if (isBlank(canSelfRegister)) { + return true; + } + return toBoolean(canSelfRegister); + } + + private OrcidTokenResponseDTO getOrcidAccessToken(String code) { + try { + return orcidClient.getAccessToken(code); + } catch (Exception ex) { + LOGGER.error("An error occurs retriving the ORCID access_token", ex); + return null; + } + } + + public OrcidClient getOrcidClient() { + return orcidClient; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java index 86cfb50c5f27..6d1ca862d307 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/PasswordAuthentication.java @@ -15,12 +15,14 @@ import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import 
org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -41,7 +43,6 @@ * Basic Auth username and password to the AuthenticationManager. * * @author Larry Stone - * @version $Revision$ */ public class PasswordAuthentication implements AuthenticationMethod { @@ -49,7 +50,12 @@ public class PasswordAuthentication /** * log4j category */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(PasswordAuthentication.class); + private static final Logger log = LogManager.getLogger(); + + private static final String PASSWORD_AUTHENTICATED = "password.authenticated"; + + private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + /** @@ -142,12 +148,12 @@ public List getSpecialGroups(Context context, HttpServletRequest request) .toString())) { String groupName = DSpaceServicesFactory.getInstance().getConfigurationService() .getProperty("authentication-password.login.specialgroup"); - if ((groupName != null) && (!groupName.trim().equals(""))) { + if ((groupName != null) && !groupName.trim().isEmpty()) { Group specialGroup = EPersonServiceFactory.getInstance().getGroupService() .findByName(context, groupName); if (specialGroup == null) { // Oops - the group isn't there. 
- log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "password_specialgroup", "Group defined in modules/authentication-password.cfg login" + ".specialgroup does not exist")); @@ -158,7 +164,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) } } } catch (Exception e) { - log.error(LogManager.getHeader(context, "getSpecialGroups", ""), e); + log.error(LogHelper.getHeader(context, "getSpecialGroups", ""), e); } return Collections.EMPTY_LIST; } @@ -181,7 +187,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) * SUCCESS, BAD_CREDENTIALS, CERT_REQUIRED, NO_SUCH_USER, BAD_ARGS *

    Meaning: *
    SUCCESS - authenticated OK. - *
    BAD_CREDENTIALS - user exists, but assword doesn't match + *
    BAD_CREDENTIALS - user exists, but password doesn't match *
    CERT_REQUIRED - not allowed to login this way without X.509 cert. *
    NO_SUCH_USER - no EPerson with matching email address. *
    BAD_ARGS - missing username, or user matched but cannot login. @@ -196,7 +202,7 @@ public int authenticate(Context context, throws SQLException { if (username != null && password != null) { EPerson eperson = null; - log.info(LogManager.getHeader(context, "authenticate", "attempting password auth of user=" + username)); + log.info(LogHelper.getHeader(context, "authenticate", "attempting password auth of user=" + username)); eperson = EPersonServiceFactory.getInstance().getEPersonService() .findByEmail(context, username.toLowerCase()); @@ -208,7 +214,7 @@ public int authenticate(Context context, return BAD_ARGS; } else if (eperson.getRequireCertificate()) { // this user can only login with x.509 certificate - log.warn(LogManager.getHeader(context, "authenticate", + log.warn(LogHelper.getHeader(context, "authenticate", "rejecting PasswordAuthentication because " + username + " requires " + "certificate.")); return CERT_REQUIRED; @@ -216,7 +222,10 @@ public int authenticate(Context context, .checkPassword(context, eperson, password)) { // login is ok if password matches: context.setCurrentUser(eperson); - log.info(LogManager.getHeader(context, "authenticate", "type=PasswordAuthentication")); + if (request != null) { + request.setAttribute(PASSWORD_AUTHENTICATED, true); + } + log.info(LogHelper.getHeader(context, "authenticate", "type=PasswordAuthentication")); return SUCCESS; } else { return BAD_CREDENTIALS; @@ -247,4 +256,23 @@ public String loginPageURL(Context context, public String getName() { return "password"; } + + + @Override + public boolean isUsed(final Context context, final HttpServletRequest request) { + if (request != null && + context.getCurrentUser() != null && + request.getAttribute(PASSWORD_AUTHENTICATED) != null) { + return true; + } + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + if (context == null || ePerson == null) { + return false; + } + return 
ePersonService.checkPassword(context, ePerson, currentPassword); + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java index 53502a22ce8d..791634a7dc25 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/ShibAuthentication.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.UUID; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -235,7 +234,7 @@ public int authenticate(Context context, String username, String password, // Step 4: Log the user in. context.setCurrentUser(eperson); - request.getSession().setAttribute("shib.authenticated", true); + request.setAttribute("shib.authenticated", true); AuthenticateServiceFactory.getInstance().getAuthenticationService().initEPerson(context, request, eperson); log.info(eperson.getEmail() + " has been authenticated via shibboleth."); @@ -290,20 +289,13 @@ public List getSpecialGroups(Context context, HttpServletRequest request) try { // User has not successfuly authenticated via shibboleth. if (request == null || - context.getCurrentUser() == null || - request.getSession().getAttribute("shib.authenticated") == null) { + context.getCurrentUser() == null) { return Collections.EMPTY_LIST; } - // If we have already calculated the special groups then return them. 
- if (request.getSession().getAttribute("shib.specialgroup") != null) { + if (context.getSpecialGroups().size() > 0 ) { log.debug("Returning cached special groups."); - List sessionGroupIds = (List) request.getSession().getAttribute("shib.specialgroup"); - List result = new ArrayList<>(); - for (UUID uuid : sessionGroupIds) { - result.add(groupService.find(context, uuid)); - } - return result; + return context.getSpecialGroups(); } log.debug("Starting to determine special groups"); @@ -396,16 +388,8 @@ public List getSpecialGroups(Context context, HttpServletRequest request) log.info("Added current EPerson to special groups: " + groups); - List groupIds = new ArrayList<>(); - for (Group group : groups) { - groupIds.add(group.getID()); - } - - // Cache the special groups, so we don't have to recalculate them again - // for this session. - request.getSession().setAttribute("shib.specialgroup", groupIds); - return new ArrayList<>(groups); + } catch (Throwable t) { log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t); return Collections.EMPTY_LIST; @@ -515,7 +499,7 @@ public String loginPageURL(Context context, HttpServletRequest request, HttpServ } // Determine the server return URL, where shib will send the user after authenticating. - // We need it to go back to DSpace's ShibbolethRestController so we will extract the user's information, + // We need it to trigger DSpace's ShibbolethLoginFilter so we will extract the user's information, // locally authenticate them & then redirect back to the UI. String returnURL = configurationService.getProperty("dspace.server.url") + "/api/authn/shibboleth" + ((redirectUrl != null) ? 
"?redirectUrl=" + redirectUrl : ""); @@ -1283,5 +1267,19 @@ private String getShibURL(HttpServletRequest request) { } + @Override + public boolean isUsed(final Context context, final HttpServletRequest request) { + if (request != null && + context.getCurrentUser() != null && + request.getAttribute("shib.authenticated") != null) { + return true; + } + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java b/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java index df9e76711618..12dc5feda583 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/X509Authentication.java @@ -35,7 +35,7 @@ import org.dspace.authenticate.service.AuthenticationService; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.factory.EPersonServiceFactory; @@ -128,6 +128,8 @@ public class X509Authentication implements AuthenticationMethod { protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private static final String X509_AUTHENTICATED = "x509.authenticated"; + /** * Initialization: Set caPublicKey and/or keystore. 
This loads the @@ -286,7 +288,7 @@ private static boolean isValid(Context context, X509Certificate certificate) { try { certificate.checkValidity(); } catch (CertificateException e) { - log.info(LogManager.getHeader(context, "authentication", + log.info(LogHelper.getHeader(context, "authentication", "X.509 Certificate is EXPIRED or PREMATURE: " + e.toString())); return false; @@ -298,7 +300,7 @@ private static boolean isValid(Context context, X509Certificate certificate) { certificate.verify(caPublicKey); return true; } catch (GeneralSecurityException e) { - log.info(LogManager.getHeader(context, "authentication", + log.info(LogHelper.getHeader(context, "authentication", "X.509 Certificate FAILED SIGNATURE check: " + e.toString())); } @@ -322,11 +324,11 @@ private static boolean isValid(Context context, X509Certificate certificate) { } } log - .info(LogManager + .info(LogHelper .getHeader(context, "authentication", "Keystore method FAILED SIGNATURE check on client cert.")); } catch (GeneralSecurityException e) { - log.info(LogManager.getHeader(context, "authentication", + log.info(LogHelper.getHeader(context, "authentication", "X.509 Certificate FAILED SIGNATURE check: " + e.toString())); } @@ -461,7 +463,7 @@ public List getSpecialGroups(Context context, HttpServletRequest request) if (group != null) { groups.add(group); } else { - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "configuration_error", "unknown_group=" + groupName)); } @@ -513,7 +515,7 @@ public int authenticate(Context context, String username, String password, try { if (!isValid(context, certs[0])) { log - .warn(LogManager + .warn(LogHelper .getHeader(context, "authenticate", "type=x509certificate, status=BAD_CREDENTIALS (not valid)")); return BAD_CREDENTIALS; @@ -530,7 +532,7 @@ public int authenticate(Context context, String username, String password, if (email != null && canSelfRegister(context, request, null)) { // Register the new user automatically - 
log.info(LogManager.getHeader(context, "autoregister", + log.info(LogHelper.getHeader(context, "autoregister", "from=x.509, email=" + email)); // TEMPORARILY turn off authorisation @@ -544,30 +546,32 @@ && canSelfRegister(context, request, null)) { context.dispatchEvents(); context.restoreAuthSystemState(); context.setCurrentUser(eperson); + request.setAttribute(X509_AUTHENTICATED, true); setSpecialGroupsFlag(request, email); return SUCCESS; } else { // No auto-registration for valid certs log - .warn(LogManager + .warn(LogHelper .getHeader(context, "authenticate", "type=cert_but_no_record, cannot auto-register")); return NO_SUCH_USER; } } else if (!eperson.canLogIn()) { // make sure this is a login account - log.warn(LogManager.getHeader(context, "authenticate", + log.warn(LogHelper.getHeader(context, "authenticate", "type=x509certificate, email=" + email + ", canLogIn=false, rejecting.")); return BAD_ARGS; } else { - log.info(LogManager.getHeader(context, "login", + log.info(LogHelper.getHeader(context, "login", "type=x509certificate")); context.setCurrentUser(eperson); + request.setAttribute(X509_AUTHENTICATED, true); setSpecialGroupsFlag(request, email); return SUCCESS; } } catch (AuthorizeException ce) { - log.warn(LogManager.getHeader(context, "authorize_exception", + log.warn(LogHelper.getHeader(context, "authorize_exception", ""), ce); } @@ -594,4 +598,19 @@ public String loginPageURL(Context context, HttpServletRequest request, public String getName() { return "x509"; } + + @Override + public boolean isUsed(final Context context, final HttpServletRequest request) { + if (request != null && + context.getCurrentUser() != null && + request.getAttribute(X509_AUTHENTICATED) != null) { + return true; + } + return false; + } + + @Override + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword) { + return false; + } } diff --git a/dspace-api/src/main/java/org/dspace/authenticate/oidc/OidcClient.java 
b/dspace-api/src/main/java/org/dspace/authenticate/oidc/OidcClient.java new file mode 100644 index 000000000000..8656574ee9f9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/oidc/OidcClient.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate.oidc; + +import java.util.Map; + +import org.dspace.authenticate.oidc.model.OidcTokenResponseDTO; + +/** + * Client to interact with the configured OIDC provider. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OidcClient { + + /** + * Exchange the authorization code for a 3-legged access token. The + * authorization code expires upon use. + * + * @param code the authorization code + * @return the OIDC token + * @throws OidcClientException if some error occurs during the exchange + */ + OidcTokenResponseDTO getAccessToken(String code) throws OidcClientException; + + /** + * Retrieve the info related to the user associated with the given accessToken + * from the user info endpoint. 
+ * + * @param accessToken the access token + * @return a map with the user infos + * @throws OidcClientException if some error occurs during the exchange + */ + Map getUserInfo(String accessToken) throws OidcClientException; + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/oidc/OidcClientException.java b/dspace-api/src/main/java/org/dspace/authenticate/oidc/OidcClientException.java new file mode 100644 index 000000000000..4f9bc5fdc9bb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/oidc/OidcClientException.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate.oidc; + +/** + * Exception throwable from class that implements {@link OidcClient} in case of + * error response from the OIDC provider. + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OidcClientException extends RuntimeException { + + private static final long serialVersionUID = -7618061110212398216L; + + private int status = 0; + + public OidcClientException(int status, String content) { + super(content); + this.status = status; + } + + public OidcClientException(Throwable cause) { + super(cause); + } + + public int getStatus() { + return this.status; + } +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/oidc/impl/OidcClientImpl.java b/dspace-api/src/main/java/org/dspace/authenticate/oidc/impl/OidcClientImpl.java new file mode 100644 index 000000000000..ddab01e8cb5d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/oidc/impl/OidcClientImpl.java @@ -0,0 +1,155 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ 
+ */ +package org.dspace.authenticate.oidc.impl; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import javax.annotation.PostConstruct; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.NameValuePair; +import org.apache.http.client.HttpClient; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; +import org.dspace.authenticate.oidc.OidcClient; +import org.dspace.authenticate.oidc.OidcClientException; +import org.dspace.authenticate.oidc.model.OidcTokenResponseDTO; +import org.dspace.services.ConfigurationService; +import org.dspace.util.ThrowingSupplier; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OidcClient}. 
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OidcClientImpl implements OidcClient { + + @Autowired + private ConfigurationService configurationService; + + private final ObjectMapper objectMapper = new ObjectMapper(); + + @PostConstruct + private void setup() { + objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); + } + + @Override + public OidcTokenResponseDTO getAccessToken(String code) throws OidcClientException { + List params = new ArrayList(); + params.add(new BasicNameValuePair("code", code)); + params.add(new BasicNameValuePair("grant_type", "authorization_code")); + params.add(new BasicNameValuePair("client_id", getClientId())); + params.add(new BasicNameValuePair("client_secret", getClientSecret())); + params.add(new BasicNameValuePair("redirect_uri", getRedirectUrl())); + + HttpUriRequest httpUriRequest = RequestBuilder.post(getTokenEndpointUrl()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Accept", "application/json") + .setEntity(new UrlEncodedFormEntity(params, Charset.defaultCharset())) + .build(); + + return executeAndParseJson(httpUriRequest, OidcTokenResponseDTO.class); + + } + + @Override + @SuppressWarnings("unchecked") + public Map getUserInfo(String accessToken) throws OidcClientException { + + HttpUriRequest httpUriRequest = RequestBuilder.get(getUserInfoEndpointUrl()) + .addHeader("Authorization", "Bearer " + accessToken) + .build(); + + return executeAndParseJson(httpUriRequest, Map.class); + } + + private T executeAndParseJson(HttpUriRequest httpUriRequest, Class clazz) { + + HttpClient client = HttpClientBuilder.create().build(); + + return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (isNotSuccessfull(response)) { + throw new OidcClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return objectMapper.readValue(getContent(response), clazz); + + }); + + } + + private T 
executeAndReturns(ThrowingSupplier supplier) { + try { + return supplier.get(); + } catch (OidcClientException ex) { + throw ex; + } catch (Exception ex) { + throw new OidcClientException(ex); + } + } + + private String formatErrorMessage(HttpResponse response) { + try { + return IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset()); + } catch (UnsupportedOperationException | IOException e) { + return "Generic error"; + } + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private int getStatusCode(HttpResponse response) { + return response.getStatusLine().getStatusCode(); + } + + private String getContent(HttpResponse response) throws UnsupportedOperationException, IOException { + HttpEntity entity = response.getEntity(); + return entity != null ? IOUtils.toString(entity.getContent(), UTF_8.name()) : null; + } + + private String getClientId() { + return configurationService.getProperty("authentication-oidc.client-id"); + } + + private String getClientSecret() { + return configurationService.getProperty("authentication-oidc.client-secret"); + } + + private String getTokenEndpointUrl() { + return configurationService.getProperty("authentication-oidc.token-endpoint"); + } + + private String getUserInfoEndpointUrl() { + return configurationService.getProperty("authentication-oidc.user-info-endpoint"); + } + + private String getRedirectUrl() { + return configurationService.getProperty("authentication-oidc.redirect-url"); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authenticate/oidc/model/OidcTokenResponseDTO.java b/dspace-api/src/main/java/org/dspace/authenticate/oidc/model/OidcTokenResponseDTO.java new file mode 100644 index 000000000000..d2c2efc987bc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authenticate/oidc/model/OidcTokenResponseDTO.java @@ -0,0 +1,115 @@ +/** + * The contents of this file are subject 
to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authenticate.oidc.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * This class map the response from and OpenID Connect token endpoint. + * {@link https://openid.net/specs/openid-connect-core-1_0.html} + * + * Response example: + * + * { "access_token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCIsIm9yZ...", "id_token": + * "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGki...", "token_type": "bearer", + * "expires_in": 28800, "scope": "pgc-role email openid profile" } + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * + */ +public class OidcTokenResponseDTO { + + /** + * The access token release by the authorization server this is the most + * relevant item, because it allow the server to access to the user resources as + * defined in the scopes {@link https://tools.ietf.org/html/rfc6749#section-1.4} + */ + @JsonProperty("access_token") + private String accessToken; + + /** + * The id token as defined in the OpenID connect standard + * {@link https://openid.net/specs/openid-connect-core-1_0.html#IDToken} + */ + @JsonProperty("id_token") + private String idToken; + + /** + * The refresh token as defined in the OAuth standard + * {@link https://tools.ietf.org/html/rfc6749#section-1.5} + */ + @JsonProperty("refresh_token") + private String refreshToken; + + /** + * It will be "bearer" + */ + @JsonProperty("token_type") + private String tokenType; + + /** + * The expiration timestamp in millis + */ + @JsonProperty("expires_in") + private Long expiresIn; + + /** + * List of scopes {@link https://tools.ietf.org/html/rfc6749#section-3.3} + */ + @JsonProperty("scope") + private String scope; + + public String getAccessToken() { + return accessToken; + } + + public void setAccessToken(String accessToken) { + this.accessToken = accessToken; + } 
+ + public String getIdToken() { + return idToken; + } + + public void setIdToken(String idToken) { + this.idToken = idToken; + } + + public String getRefreshToken() { + return refreshToken; + } + + public void setRefreshToken(String refreshToken) { + this.refreshToken = refreshToken; + } + + public String getTokenType() { + return tokenType; + } + + public void setTokenType(String tokenType) { + this.tokenType = tokenType; + } + + public Long getExpiresIn() { + return expiresIn; + } + + public void setExpiresIn(Long expiresIn) { + this.expiresIn = expiresIn; + } + + public String getScope() { + return scope; + } + + public void setScope(String scope) { + this.scope = scope; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java b/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java index d4a1cd5d0db8..e955302ec3d7 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/service/AuthenticationService.java @@ -168,4 +168,25 @@ public List getSpecialGroups(Context context, */ public Iterator authenticationMethodIterator(); + /** + * Retrieves the currently used authentication method name based on the context and the request + * + * @param context A valid DSpace context. + * @param request The request that started this operation, or null if not applicable. + * @return the currently used authentication method name + */ + public String getAuthenticationMethod(Context context, HttpServletRequest request); + + /** + * Check if the given current password is valid to change the password of the + * given ePerson. 
+ * + * @param context The DSpace context + * @param ePerson the ePerson related to the password change + * @param currentPassword The current password to check + * @return true if the provided password matches with current + * password + */ + public boolean canChangePassword(Context context, EPerson ePerson, String currentPassword); + } diff --git a/dspace-api/src/main/java/org/dspace/authority/AuthoritySearchService.java b/dspace-api/src/main/java/org/dspace/authority/AuthoritySearchService.java index 3c956e00091e..c0373e944264 100644 --- a/dspace-api/src/main/java/org/dspace/authority/AuthoritySearchService.java +++ b/dspace-api/src/main/java/org/dspace/authority/AuthoritySearchService.java @@ -16,6 +16,8 @@ import org.apache.solr.client.solrj.response.QueryResponse; /** + * Manage queries of the Solr authority core. + * * @author Antoine Snyers (antoine at atmire.com) * @author Kevin Van de Velde (kevin at atmire dot com) * @author Ben Bosman (ben at atmire dot com) @@ -26,6 +28,13 @@ public interface AuthoritySearchService { public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException, IOException; + /** + * Retrieves all the metadata fields which are indexed in the authority control. + * + * @return names of indexed fields. + * @throws SolrServerException passed through. + * @throws MalformedURLException passed through. 
+ */ public List getAllIndexedMetadataFields() throws Exception; } diff --git a/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java index 8d9ab0c5fc4c..ca5b4a11b543 100644 --- a/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java @@ -11,6 +11,8 @@ import java.net.MalformedURLException; import java.util.ArrayList; import java.util.List; +import javax.inject.Inject; +import javax.inject.Named; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -22,6 +24,7 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrInputDocument; import org.dspace.authority.indexer.AuthorityIndexingService; +import org.dspace.service.impl.HttpConnectionPoolService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -35,6 +38,9 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho private static final Logger log = LogManager.getLogger(AuthoritySolrServiceImpl.class); + @Inject @Named("solrHttpConnectionPoolService") + private HttpConnectionPoolService httpConnectionPoolService; + protected AuthoritySolrServiceImpl() { } @@ -44,7 +50,7 @@ protected AuthoritySolrServiceImpl() { */ protected SolrClient solr = null; - protected SolrClient getSolr() + public SolrClient getSolr() throws MalformedURLException, SolrServerException, IOException { if (solr == null) { @@ -54,12 +60,18 @@ protected SolrClient getSolr() log.debug("Solr authority URL: " + solrService); - HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build(); + HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService) + .withHttpClient(httpConnectionPoolService.getClient()) + .build(); solrServer.setBaseURL(solrService); SolrQuery 
solrQuery = new SolrQuery().setQuery("*:*"); - solrServer.query(solrQuery); + try { + solrServer.query(solrQuery); + } catch (Exception ex) { + log.error("An error occurs querying authority solr core", ex); + } solr = solrServer; } diff --git a/dspace-api/src/main/java/org/dspace/authority/AuthorityValue.java b/dspace-api/src/main/java/org/dspace/authority/AuthorityValue.java index 10a608bb7660..6ca0292fdb1b 100644 --- a/dspace-api/src/main/java/org/dspace/authority/AuthorityValue.java +++ b/dspace-api/src/main/java/org/dspace/authority/AuthorityValue.java @@ -9,6 +9,10 @@ import java.sql.SQLException; import java.text.DateFormat; +import java.time.DateTimeException; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; @@ -16,6 +20,7 @@ import java.util.Map; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrInputDocument; @@ -25,9 +30,6 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; import org.dspace.util.SolrUtils; -import org.joda.time.DateTime; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; /** * @author Antoine Snyers (antoine at atmire.com) @@ -192,7 +194,7 @@ public void updateItem(Context context, Item currentItem, MetadataValue value) } /** - * Information that can be used the choice ui + * Information that can be used the choice ui. * * @return map */ @@ -200,42 +202,51 @@ public Map choiceSelectMap() { return new HashMap<>(); } - - public List getDateFormatters() { - List list = new ArrayList<>(); - list.add(ISODateTimeFormat.dateTime()); - list.add(ISODateTimeFormat.dateTimeNoMillis()); + /** + * Build a list of ISO date formatters to parse various forms. + * + *

    Note: any formatter which does not parse a zone or + * offset must have a default zone set. See {@link stringToDate}. + * + * @return the formatters. + */ + static private List getDateFormatters() { + List list = new ArrayList<>(); + list.add(java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.SSS]X")); + list.add(java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME + .withZone(ZoneId.systemDefault().normalized())); return list; } - public Date stringToDate(String date) { + /** + * Convert a date string to internal form, trying several parsers. + * + * @param date serialized date to be converted. + * @return converted date, or null if no parser accepted the input. + */ + static public Date stringToDate(String date) { Date result = null; if (StringUtils.isNotBlank(date)) { - List dateFormatters = getDateFormatters(); - boolean converted = false; - int formatter = 0; - while (!converted) { + for (DateTimeFormatter formatter : getDateFormatters()) { try { - DateTimeFormatter dateTimeFormatter = dateFormatters.get(formatter); - DateTime dateTime = dateTimeFormatter.parseDateTime(date); - result = dateTime.toDate(); - converted = true; - } catch (IllegalArgumentException e) { - formatter++; - if (formatter > dateFormatters.size()) { - converted = true; - } - log.error("Could not find a valid date format for: \"" + date + "\"", e); + ZonedDateTime dateTime = ZonedDateTime.parse(date, formatter); + result = Date.from(dateTime.toInstant()); + break; + } catch (DateTimeException e) { + log.debug("Input '{}' did not match {}", date, formatter); } } } + if (null == result) { + log.error("Could not find a valid date format for: \"{}\"", date); + } return result; } /** * log4j logger */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AuthorityValue.class); + private static Logger log = LogManager.getLogger(); @Override public String toString() { @@ -272,6 +283,10 @@ public AuthorityValue newInstance(String info) { return new 
AuthorityValue(); } + /** + * Get the type of authority which created this value. + * @return type name. + */ public String getAuthorityType() { return "internal"; } diff --git a/dspace-api/src/main/java/org/dspace/authority/AuthorityValueServiceImpl.java b/dspace-api/src/main/java/org/dspace/authority/AuthorityValueServiceImpl.java index fc62f0df1997..70005ecbeea6 100644 --- a/dspace-api/src/main/java/org/dspace/authority/AuthorityValueServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authority/AuthorityValueServiceImpl.java @@ -21,7 +21,7 @@ import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.authority.SolrAuthority; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.springframework.beans.factory.annotation.Autowired; /** @@ -220,7 +220,7 @@ protected List find(Context context, String queryString) { } } } catch (Exception e) { - log.error(LogManager.getHeader(context, "Error while retrieving AuthorityValue from solr", + log.error(LogHelper.getHeader(context, "Error while retrieving AuthorityValue from solr", "query: " + queryString), e); } diff --git a/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java b/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java index a1c3867fb9d3..6753a5d113b7 100644 --- a/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java +++ b/dspace-api/src/main/java/org/dspace/authority/orcid/Orcidv3SolrAuthorityImpl.java @@ -21,7 +21,8 @@ import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authority.AuthorityValue; import org.dspace.authority.SolrAuthorityInterface; import org.dspace.external.OrcidRestConnector; @@ -40,7 +41,7 @@ 
*/ public class Orcidv3SolrAuthorityImpl implements SolrAuthorityInterface { - private static Logger log = Logger.getLogger(Orcidv3SolrAuthorityImpl.class); + private final static Logger log = LogManager.getLogger(); private OrcidRestConnector orcidRestConnector; private String OAUTHUrl; diff --git a/dspace-api/src/main/java/org/dspace/authority/service/AuthorityService.java b/dspace-api/src/main/java/org/dspace/authority/service/AuthorityService.java index 42cbe2d68614..21822993ef82 100644 --- a/dspace-api/src/main/java/org/dspace/authority/service/AuthorityService.java +++ b/dspace-api/src/main/java/org/dspace/authority/service/AuthorityService.java @@ -14,14 +14,21 @@ import org.dspace.core.Context; /** - * Service interface class for the Metadata Authority - * The implementation of this class is responsible for all business logic calls for the Metadata Authority and is - * autowired by spring + * Service interface class for the Metadata Authority. + * The implementation of this class is responsible for all business logic calls + * for the Metadata Authority and is autowired by Spring. * * @author kevinvandevelde at atmire.com */ public interface AuthorityService { + /** + * Add an {@link Item} to the authority index. + * @param context current DSpace session. + * @param item the Item to be added. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. 
+ */ public void indexItem(Context context, Item item) throws SQLException, AuthorizeException; public boolean isConfigurationValid(); diff --git a/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java b/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java index 77568205afb3..6cf49ac65b22 100644 --- a/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java +++ b/dspace-api/src/main/java/org/dspace/authority/util/XMLUtils.java @@ -14,11 +14,12 @@ import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; import org.apache.logging.log4j.Logger; -import org.apache.xpath.XPathAPI; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -62,36 +63,26 @@ public static String getTextContent(Node xml, String singleNodeXPath) throws XPa /** * @param xml The starting context (a Node or a Document, for example). - * @param NodeListXPath xpath + * @param nodeListXPath xpath * @return A Node matches the NodeListXPath * null if nothing matches the NodeListXPath * @throws XPathExpressionException if xpath error */ - public static Node getNode(Node xml, String NodeListXPath) throws XPathExpressionException { - Node result = null; - try { - result = XPathAPI.selectSingleNode(xml, NodeListXPath); - } catch (TransformerException e) { - log.error("Error", e); - } - return result; + public static Node getNode(Node xml, String nodeListXPath) throws XPathExpressionException { + XPath xPath = XPathFactory.newInstance().newXPath(); + return (Node) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODE); } /** * @param xml The starting context (a Node or a Document, for example). 
- * @param NodeListXPath xpath + * @param nodeListXPath xpath * @return A NodeList containing the nodes that match the NodeListXPath * null if nothing matches the NodeListXPath * @throws XPathExpressionException if xpath error */ - public static NodeList getNodeList(Node xml, String NodeListXPath) throws XPathExpressionException { - NodeList nodeList = null; - try { - nodeList = XPathAPI.selectNodeList(xml, NodeListXPath); - } catch (TransformerException e) { - log.error("Error", e); - } - return nodeList; + public static NodeList getNodeList(Node xml, String nodeListXPath) throws XPathExpressionException { + XPath xPath = XPathFactory.newInstance().newXPath(); + return (NodeList) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODESET); } public static Iterator getNodeListIterator(Node xml, String NodeListXPath) throws XPathExpressionException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index f2252022db89..beff6fdc48e8 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -7,11 +7,13 @@ */ package org.dspace.authorize; +import static org.dspace.app.util.AuthorizeUtil.canCollectionAdminManageAccounts; +import static org.dspace.app.util.AuthorizeUtil.canCommunityAdminManageAccounts; + import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -29,16 +31,19 @@ import org.dspace.content.Item; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import 
org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverQuery.SORT_ORDER; import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; @@ -62,7 +67,7 @@ */ public class AuthorizeServiceImpl implements AuthorizeService { - private static Logger log = LogManager.getLogger(AuthorizeServiceImpl.class); + private static final Logger log = LogManager.getLogger(); @Autowired(required = true) protected BitstreamService bitstreamService; @@ -243,7 +248,7 @@ protected boolean authorize(Context c, DSpaceObject o, int action, EPerson e, bo // If authorization was given before and cached Boolean cachedResult = c.getCachedAuthorizationResult(o, action, e); if (cachedResult != null) { - return cachedResult.booleanValue(); + return cachedResult; } // is eperson set? if not, userToCheck = null (anonymous) @@ -251,13 +256,8 @@ protected boolean authorize(Context c, DSpaceObject o, int action, EPerson e, bo if (e != null) { userToCheck = e; - // perform isAdmin check to see - // if user is an Admin on this object - DSpaceObject adminObject = useInheritance ? 
serviceFactory.getDSpaceObjectService(o) - .getAdminObject(c, o, action) : null; - - if (isAdmin(c, e, adminObject)) { - c.cacheAuthorizedAction(o, action, e, true, null); + // perform immediately isAdmin check as this is cheap + if (isAdmin(c, e)) { return true; } } @@ -281,13 +281,15 @@ protected boolean authorize(Context c, DSpaceObject o, int action, EPerson e, bo ignoreCustomPolicies = !isAnyItemInstalled(c, Arrays.asList(((Bundle) o))); } if (o instanceof Item) { - if (workspaceItemService.findByItem(c, (Item) o) != null || - workflowItemService.findByItem(c, (Item) o) != null) { + // the isArchived check is fast and would exclude the possibility that the item + // is a workspace or workflow without further queries + if (!((Item) o).isArchived() && + (workspaceItemService.findByItem(c, (Item) o) != null || + workflowItemService.findByItem(c, (Item) o) != null)) { ignoreCustomPolicies = true; } } - for (ResourcePolicy rp : getPoliciesActionFilter(c, o, action)) { if (ignoreCustomPolicies @@ -308,7 +310,7 @@ protected boolean authorize(Context c, DSpaceObject o, int action, EPerson e, bo } if ((rp.getGroup() != null) - && (groupService.isMember(c, e, rp.getGroup()))) { + && groupService.isMember(c, e, rp.getGroup())) { // group was set, and eperson is a member // of that group c.cacheAuthorizedAction(o, action, e, true, rp); @@ -323,6 +325,16 @@ protected boolean authorize(Context c, DSpaceObject o, int action, EPerson e, bo } } + if (e != null) { + // if user is an Admin on this object + DSpaceObject adminObject = useInheritance ? 
serviceFactory.getDSpaceObjectService(o) + .getAdminObject(c, o, action) : null; + + if (isAdmin(c, e, adminObject)) { + c.cacheAuthorizedAction(o, action, e, true, null); + return true; + } + } // default authorization is denial c.cacheAuthorizedAction(o, action, e, false, null); return false; @@ -366,7 +378,7 @@ public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException Boolean cachedResult = c.getCachedAuthorizationResult(o, Constants.ADMIN, e); if (cachedResult != null) { - return cachedResult.booleanValue(); + return cachedResult; } // @@ -383,7 +395,7 @@ public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException } if ((rp.getGroup() != null) - && (groupService.isMember(c, e, rp.getGroup()))) { + && groupService.isMember(c, e, rp.getGroup())) { // group was set, and eperson is a member // of that group c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp); @@ -439,7 +451,7 @@ public boolean isAdmin(Context c, EPerson e) throws SQLException { if (e == null) { return false; // anonymous users can't be admins.... 
} else { - return groupService.isMember(c, e, Group.ADMIN); + return groupService.isMember(c, e, c.getAdminGroup()); } } @@ -502,15 +514,25 @@ public void inheritPolicies(Context c, DSpaceObject src, List policies = getPolicies(c, src); //Only inherit non-ADMIN policies (since ADMIN policies are automatically inherited) - List nonAdminPolicies = new ArrayList(); + //and non-custom policies as these are manually applied when appropriate + List nonAdminPolicies = new ArrayList<>(); for (ResourcePolicy rp : policies) { - if (rp.getAction() != Constants.ADMIN) { + if (rp.getAction() != Constants.ADMIN && !StringUtils.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM)) { nonAdminPolicies.add(rp); } } addPolicies(c, nonAdminPolicies, dest); } + @Override + public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest) + throws SQLException, AuthorizeException { + // find all policies for the source object + List policies = getPolicies(context, source); + removeAllPolicies(context, dest); + addPolicies(context, policies, dest); + } + @Override public void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int toAction) throws SQLException, AuthorizeException { @@ -525,7 +547,7 @@ public void switchPoliciesAction(Context context, DSpaceObject dso, int fromActi public void addPolicies(Context c, List policies, DSpaceObject dest) throws SQLException, AuthorizeException { // now add them to the destination object - List newPolicies = new LinkedList<>(); + List newPolicies = new ArrayList<>(policies.size()); for (ResourcePolicy srp : policies) { ResourcePolicy rp = resourcePolicyService.create(c); @@ -600,7 +622,7 @@ public List getAuthorizedGroups(Context c, DSpaceObject o, int actionID) throws java.sql.SQLException { List policies = getPoliciesActionFilter(c, o, actionID); - List groups = new ArrayList(); + List groups = new ArrayList<>(); for (ResourcePolicy resourcePolicy : policies) { if (resourcePolicy.getGroup() != null && 
resourcePolicyService.isDateValid(resourcePolicy)) { groups.add(resourcePolicy.getGroup()); @@ -633,60 +655,6 @@ public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group g } } - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context The relevant DSpace Context. - * @param embargoDate embargo end date - * @param reason embargo reason - * @param dso DSpace object - * @param owningCollection collection to get group policies from - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - @Override - public void generateAutomaticPolicies(Context context, Date embargoDate, - String reason, DSpaceObject dso, Collection owningCollection) - throws SQLException, AuthorizeException { - - if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - - List authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); - - removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); - - // look for anonymous - boolean isAnonymousInPlace = false; - for (Group g : authorizedGroups) { - if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) { - isAnonymousInPlace = true; - } - } - if (!isAnonymousInPlace) { - // add policies for all the groups - for (Group g : authorizedGroups) { - ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ, - reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - - } else { - // add policy just for anonymous - ResourcePolicy rp = createOrModifyPolicy(null, context, null, - groupService.findByName(context, Group.ANONYMOUS), null, - embargoDate, Constants.READ, reason, dso); - if (rp != null) { - 
resourcePolicyService.update(context, rp); - } - } - } - } - @Override public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException { @@ -768,6 +736,7 @@ public List getPoliciesActionFilterExceptRpType(Context c, DSpac * @param context context with the current user * @return true if the current user is a community admin in the site * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. */ @Override public boolean isCommunityAdmin(Context context) throws SQLException { @@ -780,18 +749,33 @@ public boolean isCommunityAdmin(Context context) throws SQLException { * @param context context with the current user * @return true if the current user is a collection admin in the site * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. */ @Override public boolean isCollectionAdmin(Context context) throws SQLException { return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE); } + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. + */ + @Override + public boolean isItemAdmin(Context context) throws SQLException { + return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE); + } + /** * Checks that the context's current user is a community or collection admin in the site. * * @param context context with the current user * @return true if the current user is a community or collection admin in the site * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. 
*/ @Override public boolean isComColAdmin(Context context) throws SQLException { @@ -817,7 +801,7 @@ public List findAdminAuthorizedCommunity(Context context, String quer query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCommunity.TYPE, - offset, limit); + offset, limit, null, null); for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) { Community community = ((IndexableCommunity) solrCollections).getIndexedObject(); communities.add(community); @@ -839,7 +823,7 @@ public long countAdminAuthorizedCommunity(Context context, String query) query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCommunity.TYPE, - null, null); + null, null, null, null); return discoverResult.getTotalSearchResults(); } @@ -864,7 +848,7 @@ public List findAdminAuthorizedCollection(Context context, String qu query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCollection.TYPE, - offset, limit); + offset, limit, CollectionService.SOLR_SORT_FIELD, SORT_ORDER.asc); for (IndexableObject solrCollections : discoverResult.getIndexableObjects()) { Collection collection = ((IndexableCollection) solrCollections).getIndexedObject(); collections.add(collection); @@ -886,29 +870,40 @@ public long countAdminAuthorizedCollection(Context context, String query) query = formatCustomQuery(query); DiscoverResult discoverResult = getDiscoverResult(context, query + "search.resourcetype:" + IndexableCollection.TYPE, - null, null); + null, null, null, null); return discoverResult.getTotalSearchResults(); } + @Override + public boolean isAccountManager(Context context) { + try { + return (canCommunityAdminManageAccounts() && isCommunityAdmin(context) + || canCollectionAdminManageAccounts() && isCollectionAdmin(context)); + } catch (SQLException e) { 
+ throw new RuntimeException(e); + } + } + private boolean performCheck(Context context, String query) throws SQLException { if (context.getCurrentUser() == null) { return false; } try { - DiscoverResult discoverResult = getDiscoverResult(context, query, null, null); + DiscoverResult discoverResult = getDiscoverResult(context, query, null, null, null, null); if (discoverResult.getTotalSearchResults() > 0) { return true; } } catch (SearchServiceException e) { - log.error("Failed getting getting community/collection admin status for " + log.error("Failed getting community/collection admin status for " + context.getCurrentUser().getEmail() + " The search error is: " + e.getMessage() + " The search resourceType filter was: " + query); } return false; } - private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit) + private DiscoverResult getDiscoverResult(Context context, String query, Integer offset, Integer limit, + String sortField, SORT_ORDER sortOrder) throws SearchServiceException, SQLException { String groupQuery = getGroupToQuery(groupService.allMemberGroups(context, context.getCurrentUser())); @@ -924,7 +919,9 @@ private DiscoverResult getDiscoverResult(Context context, String query, Integer if (limit != null) { discoverQuery.setMaxResults(limit); } - + if (sortField != null && sortOrder != null) { + discoverQuery.setSortField(sortField, sortOrder); + } return searchService.search(context, discoverQuery); } diff --git a/dspace-api/src/main/java/org/dspace/authorize/RegexPasswordValidator.java b/dspace-api/src/main/java/org/dspace/authorize/RegexPasswordValidator.java new file mode 100644 index 000000000000..d12c3ba91929 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/RegexPasswordValidator.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.authorize; + +import static org.apache.commons.lang.StringUtils.isNotBlank; + +import java.util.regex.Pattern; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link PasswordValidatorService} that verifies if the given + * password matches the configured pattern. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public class RegexPasswordValidator implements PasswordValidatorService { + + @Autowired + private ConfigurationService configurationService; + + @Override + public boolean isPasswordValidationEnabled() { + return isNotBlank(getPasswordValidationPattern()); + } + + @Override + public boolean isPasswordValid(String password) { + if (!isPasswordValidationEnabled()) { + return true; + } + + Pattern pattern = Pattern.compile(getPasswordValidationPattern()); + return pattern.matcher(password).find(); + } + + private String getPasswordValidationPattern() { + return configurationService.getProperty("authentication-password.regex-validation.pattern"); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index b84055b8b0f5..c781400bae45 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -24,6 +24,7 @@ import javax.persistence.Temporal; import javax.persistence.TemporalType; +import org.apache.solr.common.StringUtils; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; @@ -40,9 +41,16 @@ @Entity @Table(name = "resourcepolicy") public class ResourcePolicy implements ReloadableEntity { + /** This policy was set on submission, to give the submitter access.
*/ public static String TYPE_SUBMISSION = "TYPE_SUBMISSION"; + + /** This policy was set to allow access by a workflow group. */ public static String TYPE_WORKFLOW = "TYPE_WORKFLOW"; + + /** This policy was explicitly set on this object. */ public static String TYPE_CUSTOM = "TYPE_CUSTOM"; + + /** This policy was copied from the containing object's default policies. */ public static String TYPE_INHERITED = "TYPE_INHERITED"; @Id @@ -92,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.hibernate.type.MaterializedClobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "rpdescription") private String rpdescription; @@ -120,6 +128,9 @@ public boolean equals(Object obj) { return false; } final ResourcePolicy other = (ResourcePolicy) obj; + if (!StringUtils.equals(getRpName(), other.getRpName())) { + return false; + } if (getAction() != other.getAction()) { return false; } @@ -169,6 +180,7 @@ public int hashCode() { * * @return the internal identifier */ + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java index 4a2addf781b9..b762107a84c5 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicyServiceImpl.java @@ -232,6 +232,15 @@ public void removePolicies(Context c, DSpaceObject o, String type) throws SQLExc c.restoreAuthSystemState(); } + @Override + public void removePolicies(Context c, DSpaceObject o, String type, int action) + throws SQLException, AuthorizeException { + resourcePolicyDAO.deleteByDsoAndTypeAndAction(c, o, type, action); + c.turnOffAuthorisationSystem(); + contentServiceFactory.getDSpaceObjectService(o).updateLastModified(c, o); + c.restoreAuthSystemState(); + } + @Override public void 
removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group) throws SQLException, AuthorizeException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/ValidatePasswordServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/ValidatePasswordServiceImpl.java new file mode 100644 index 000000000000..663308d627fd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/ValidatePasswordServiceImpl.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize; + +import java.util.List; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.dspace.authorize.service.ValidatePasswordService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Basic implementation for validation password robustness. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class ValidatePasswordServiceImpl implements ValidatePasswordService { + + @Autowired + private List validators; + + @Override + public boolean isPasswordValid(String password) { + return validators.stream() + .filter(passwordValidator -> passwordValidator.isPasswordValidationEnabled()) + .allMatch(passwordValidator -> passwordValidator.isPasswordValid(password)); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java index 5c898a5bca61..4e12cd0bfd66 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/ResourcePolicyDAO.java @@ -39,6 +39,9 @@ public List findByDsoAndType(Context context, DSpaceObject dSpac public List findByDSoAndAction(Context context, DSpaceObject dso, int actionId) 
throws SQLException; + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dSpaceObject, String type, int action) + throws SQLException; + public List findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java index 9dd368d667df..26b6bb1d7345 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/dao/impl/ResourcePolicyDAOImpl.java @@ -8,12 +8,14 @@ package org.dspace.authorize.dao.impl; import java.sql.SQLException; +import java.util.LinkedList; import java.util.List; import java.util.UUID; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Join; +import javax.persistence.criteria.Order; import javax.persistence.criteria.Root; import org.dspace.authorize.ResourcePolicy; @@ -60,6 +62,9 @@ public List findByDsoAndType(Context context, DSpaceObject dso, criteriaBuilder.equal(resourcePolicyRoot.get(ResourcePolicy_.rptype), type) ) ); + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.asc(resourcePolicyRoot.get(ResourcePolicy_.id))); + criteriaQuery.orderBy(orderList); return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); } @@ -98,6 +103,19 @@ public List findByDSoAndAction(Context context, DSpaceObject dso return list(context, criteriaQuery, false, ResourcePolicy.class, -1, -1); } + @Override + public void deleteByDsoAndTypeAndAction(Context context, DSpaceObject dso, String type, int actionId) + throws SQLException { + String queryString = "delete from ResourcePolicy where dSpaceObject.id = :dsoId " + + "AND rptype = :rptype AND actionId= :actionId"; + Query query = createQuery(context, 
queryString); + query.setParameter("dsoId", dso.getID()); + query.setParameter("rptype", type); + query.setParameter("actionId", actionId); + query.executeUpdate(); + + } + @Override public List findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/package-info.java b/dspace-api/src/main/java/org/dspace/authorize/package-info.java new file mode 100644 index 000000000000..f36c39cfe351 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/package-info.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +/** + * Represents permissions for access to DSpace content. + * + *

    Philosophy

    + * DSpace's authorization system follows the classical "police state" + * philosophy of security - the user can do nothing, unless it is + * specifically allowed. Those permissions are spelled out with + * {@link ResourcePolicy} objects, stored in the {@code resourcepolicy} table + * in the database. + * + *

    Policies are attached to Content

    + * Resource Policies get assigned to all of the content objects in + * DSpace - collections, communities, items, bundles, and bitstreams. + * (Currently they are not attached to non-content objects such as + * {@code EPerson} or {@code Group}. But they could be, hence the name + * {@code ResourcePolicy} instead of {@code ContentPolicy}.) + * + *

    Policies are tuples

    + * Authorization is based on evaluating the tuple of (object, action, actor), + * such as (ITEM, READ, EPerson John Smith) to check if the {@code EPerson} + * "John Smith" can read an item. {@code ResourcePolicy} objects are pretty + * simple, describing a single instance of (object, action, actor). If + * multiple actors are desired, such as groups 10, 11, and 12 are allowed to + * READ Item 13, you simply create a {@code ResourcePolicy} for each group. + * + *

    Built-in groups

    + * The install process should create two built-in groups - {@code Anonymous} + * for anonymous/public access, and {@code Administrators} for administrators. + * Group {@code Anonymous} allows anyone access, even if not authenticated. + * Group {@code Administrators}' members have super-user rights, + * and are allowed to do any action to any object. + * + *

    Policy types + * Policies have a "type" used to distinguish policies which are applied for + * specific purposes. + *
    + *
    CUSTOM
    + *
    These are created and assigned explicitly by users.
    + *
    INHERITED
    + *
    These are copied from a containing object's default policies.
    + *
    SUBMISSION
    + *
    These are applied during submission to give the submitter access while + * composing a submission.
    + *
    WORKFLOW
    + *
    These are automatically applied during workflow, to give curators + * access to submissions in their curation queues. They usually have an + * automatically-created workflow group as the actor.
    + * + *

    Start and End dates

    + * A policy may have a start date and/or an end date. The policy is + * considered not valid before the start date or after the end date. No date + * means do not apply the related test. For example, embargo until a given + * date can be expressed by a READ policy with a given start date, and a + * limited-time offer by a READ policy with a given end date. + * + * @author dstuve + * @author mwood + */ +package org.dspace.authorize; diff --git a/dspace-api/src/main/java/org/dspace/authorize/package.html b/dspace-api/src/main/java/org/dspace/authorize/package.html deleted file mode 100644 index 66ce0f824773..000000000000 --- a/dspace-api/src/main/java/org/dspace/authorize/package.html +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - -

    Handles permissions for DSpace content. -

    - -

    Philosophy
    -DSpace's authorization system follows the classical "police state" -philosophy of security - the user can do nothing, unless it is -specifically allowed. Those permissions are spelled out with -ResourcePolicy objects, stored in the resourcepolicy table in the -database. -

    - -

    Policies are attached to Content

    -

    Policies are attached to Content
    -Resource Policies get assigned to all of the content objects in -DSpace - collections, communities, items, bundles, and bitstreams. -(Currently they are not attached to non-content objects such as EPerson -or Group. But they could be, hence the name ResourcePolicy instead of -ContentPolicy.) -

    - -

    Policies are tuples

    -Authorization is based on evaluating the tuple of (object, action, who), -such as (ITEM, READ, EPerson John Smith) to check if the EPerson "John Smith" -can read an item. ResourcePolicy objects are pretty simple, describing a single instance of -(object, action, who). If multiple who's are desired, such as Groups 10, 11, and -12 are allowed to READ Item 13, you simply create a ResourcePolicy for each -group. -

    - -

    Special Groups

    -The install process should create two special groups - group 0, for -anonymous/public access, and group 1 for administrators. -Group 0 (public/anonymous) allows anyone access, even if they are not -authenticated. Group 1's (admin) members have super-user rights, and -are allowed to do any action to any object. -

    - -

    Unused ResourcePolicy attributes

    -ResourcePolicies have a few attributes that are currently unused, -but are included with the intent that they will be used someday. -One is start and end dates, for when policies will be active, so that -permissions for content can change over time. The other is the EPerson - -policies could apply to only a single EPerson, but for ease of -administration currently a Group is the recommended unit to use to -describe 'who'. -

    - - - diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 9f6171a22030..e0a94833d76c 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -470,24 +470,6 @@ public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Grou public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException; - - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context current context - * @param embargoDate date - * @param reason reason - * @param dso DSpaceObject - * @param owningCollection collection - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso, - Collection owningCollection) throws SQLException, AuthorizeException; - public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException; @@ -532,6 +514,15 @@ void switchPoliciesAction(Context context, DSpaceObject dso, int fromAction, int */ boolean isCollectionAdmin(Context context) throws SQLException; + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. 
+ * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + */ + boolean isItemAdmin(Context context) throws SQLException; + /** * Checks that the context's current user is a community or collection admin in the site. * @@ -592,4 +583,25 @@ List findAdminAuthorizedCollection(Context context, String query, in */ long countAdminAuthorizedCollection(Context context, String query) throws SearchServiceException, SQLException; + + /** + * Returns true if the current user can manage accounts. + * + * @param context context with the current user + * @return true if the current user can manage accounts + */ + boolean isAccountManager(Context context); + + /** + * Replace all the policies in the target object with exactly the same policies that exist in the source object + * + * @param context DSpace Context + * @param source source of policies + * @param dest destination of inherited policies + * @throws SQLException if there's a database problem + * @throws AuthorizeException if the current user is not authorized to add these policies + */ + public void replaceAllPolicies(Context context, DSpaceObject source, DSpaceObject dest) + throws SQLException, AuthorizeException; + } diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/PasswordValidatorService.java b/dspace-api/src/main/java/org/dspace/authorize/service/PasswordValidatorService.java new file mode 100644 index 000000000000..5817969b6d8f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/service/PasswordValidatorService.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize.service; + +/** + * Interface for classes that validate a given password with a 
specific + * strategy. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public interface PasswordValidatorService { + + /** + * Check if the password validator is active. + */ + public boolean isPasswordValidationEnabled(); + + /** + * This method checks whether the password is valid + * + * @param password password to validate + */ + public boolean isPasswordValid(String password); +} diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java index f1d8b30242a7..43735fcd6089 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ResourcePolicyService.java @@ -53,12 +53,19 @@ public List find(Context c, EPerson e, List groups, int a throws SQLException; /** - * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID. - * This method can be used to detect duplicate ResourcePolicies. + * Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring + * IDs with a specific PolicyID. This method can be used to detect duplicate + * ResourcePolicies. * - * @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies. - * @return List of resource policies for the same DSpaceObject, group and action but other policyID. - * @throws SQLException + * @param context current DSpace session. + * @param dso find policies for this object. + * @param group find policies referring to this group. + * @param action find policies for this action. + * @param notPolicyID ResourcePolicies with this ID will be ignored while + * looking out for equal ResourcePolicies. + * @return List of resource policies for the same DSpaceObject, group and + * action but other policyID. + * @throws SQLException passed through. 
*/ public List findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) @@ -68,6 +75,16 @@ public List findByTypeGroupActionExceptId(Context context, DSpac public boolean isDateValid(ResourcePolicy resourcePolicy); + /** + * Create and persist a copy of a given ResourcePolicy, with an empty + * dSpaceObject field. + * + * @param context current DSpace session. + * @param resourcePolicy the policy to be copied. + * @return the copy. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + */ public ResourcePolicy clone(Context context, ResourcePolicy resourcePolicy) throws SQLException, AuthorizeException; public void removeAllPolicies(Context c, DSpaceObject o) throws SQLException, AuthorizeException; @@ -76,6 +93,9 @@ public List findByTypeGroupActionExceptId(Context context, DSpac public void removePolicies(Context c, DSpaceObject o, String type) throws SQLException, AuthorizeException; + public void removePolicies(Context c, DSpaceObject o, String type, int action) + throws SQLException, AuthorizeException; + public void removeDsoGroupPolicies(Context context, DSpaceObject dso, Group group) throws SQLException, AuthorizeException; @@ -117,6 +137,7 @@ public List findExceptRpType(Context c, DSpaceObject o, int acti * @param ePerson ePerson whose policies want to find * @param offset the position of the first result to return * @param limit paging limit + * @return some of the policies referring to {@code ePerson}. 
* @throws SQLException if database error */ public List findByEPerson(Context context, EPerson ePerson, int offset, int limit) diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/ValidatePasswordService.java b/dspace-api/src/main/java/org/dspace/authorize/service/ValidatePasswordService.java new file mode 100644 index 000000000000..0d5f6191f660 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/authorize/service/ValidatePasswordService.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize.service; + +/** + * Services to use during Validating of password. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface ValidatePasswordService { + + /** + * This method checks whether the password is valid based on the configured + * rules/strategies. + * + * @param password password to validate + */ + public boolean isPasswordValid(String password); + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java index 22cf02fe1321..03130e39e78b 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseDAO.java @@ -8,8 +8,8 @@ package org.dspace.browse; import java.util.List; -import java.util.UUID; +import org.dspace.content.DSpaceObject; import org.dspace.content.Item; /** @@ -140,21 +140,21 @@ public interface BrowseDAO { public void setAscending(boolean ascending); /** - * Get the database ID of the container object. The container object will be a + * Get the container object. The container object will be a * Community or a Collection. 
* - * @return the database id of the container, or -1 if none is set + * @return the container, or null if none is set */ - public UUID getContainerID(); + public DSpaceObject getContainer(); /** - * Set the database id of the container object. This should be the id of a - * Community or Collection. This will constrain the results of the browse - * to only items or values within items that appear in the given container. + * Set the container object. This should be a Community or Collection. + * This will constrain the results of the browse to only items or values within items that appear in the given + * container and add the related configuration default filters. * - * @param containerID community/collection internal ID (UUID) + * @param container community/collection */ - public void setContainerID(UUID containerID); + public void setContainer(DSpaceObject container); /** * get the name of the field in which to look for the container id. This is @@ -346,7 +346,7 @@ public interface BrowseDAO { public String getFilterValueField(); /** - * Set he name of the field in which the value to constrain results is + * Set the name of the field in which the value to constrain results is * contained * * @param valueField the name of the field diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java index 302d46eb0df4..351c36248209 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseEngine.java @@ -17,7 +17,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.sort.OrderFormat; import org.dspace.sort.SortOption; @@ -85,7 +85,7 @@ public BrowseEngine(Context context) */ public BrowseInfo browse(BrowserScope bs) throws BrowseException { - log.debug(LogManager.getHeader(context, 
"browse", "")); + log.debug(LogHelper.getHeader(context, "browse", "")); // first, load the browse scope into the object this.scope = bs; @@ -119,7 +119,7 @@ public BrowseInfo browse(BrowserScope bs) */ public BrowseInfo browseMini(BrowserScope bs) throws BrowseException { - log.info(LogManager.getHeader(context, "browse_mini", "")); + log.info(LogHelper.getHeader(context, "browse_mini", "")); // load the scope into the object this.scope = bs; @@ -141,12 +141,12 @@ public BrowseInfo browseMini(BrowserScope bs) Collection col = (Collection) scope.getBrowseContainer(); dao.setContainerTable("collection2item"); dao.setContainerIDField("collection_id"); - dao.setContainerID(col.getID()); + dao.setContainer(col); } else if (scope.inCommunity()) { Community com = (Community) scope.getBrowseContainer(); dao.setContainerTable("communities2item"); dao.setContainerIDField("community_id"); - dao.setContainerID(com.getID()); + dao.setContainer(com); } } @@ -198,11 +198,18 @@ public BrowseInfo browseMini(BrowserScope bs) */ private BrowseInfo browseByItem(BrowserScope bs) throws BrowseException { - log.info(LogManager.getHeader(context, "browse_by_item", "")); + log.info(LogHelper.getHeader(context, "browse_by_item", "")); try { // get the table name that we are going to be getting our data from dao.setTable(browseIndex.getTableName()); + if (scope.getBrowseIndex() != null && OrderFormat.TITLE.equals(scope.getBrowseIndex().getDataType())) { + // For browsing by title, apply the same normalization applied to indexed titles + dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith())); + } else { + dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith())); + } + // tell the browse query whether we are ascending or descending on the value dao.setAscending(scope.isAscending()); @@ -240,18 +247,15 @@ private BrowseInfo browseByItem(BrowserScope bs) Collection col = (Collection) scope.getBrowseContainer(); dao.setContainerTable("collection2item"); 
dao.setContainerIDField("collection_id"); - dao.setContainerID(col.getID()); + dao.setContainer(col); } else if (scope.inCommunity()) { Community com = (Community) scope.getBrowseContainer(); dao.setContainerTable("communities2item"); dao.setContainerIDField("community_id"); - dao.setContainerID(com.getID()); + dao.setContainer(com); } } - // this is the total number of results in answer to the query - int total = getTotalResults(); - // assemble the ORDER BY clause String orderBy = browseIndex.getSortField(scope.isSecondLevel()); if (scope.getSortBy() > 0) { @@ -259,6 +263,9 @@ private BrowseInfo browseByItem(BrowserScope bs) } dao.setOrderField(orderBy); + // this is the total number of results in answer to the query + int total = getTotalResults(); + int offset = scope.getOffset(); String rawFocusValue = null; if (offset < 1 && (scope.hasJumpToItem() || scope.hasJumpToValue() || scope.hasStartsWith())) { @@ -270,9 +277,6 @@ private BrowseInfo browseByItem(BrowserScope bs) String focusValue = normalizeJumpToValue(rawFocusValue); log.debug("browsing using focus: " + focusValue); - - // Convert the focus value into an offset - offset = getOffsetForValue(focusValue); } dao.setOffset(offset); @@ -291,7 +295,7 @@ private BrowseInfo browseByItem(BrowserScope bs) // now, if we don't have any results, we are at the end of the browse. This will // be because a starts_with value has been supplied for which we don't have // any items. 
- if (results.size() == 0) { + if (results.isEmpty()) { // In this case, we will calculate a new offset for the last page of results offset = total - scope.getResultsPerPage(); if (offset < 0) { @@ -374,14 +378,14 @@ private BrowseInfo browseByItem(BrowserScope bs) */ private BrowseInfo browseByValue(BrowserScope bs) throws BrowseException { - log.info(LogManager.getHeader(context, "browse_by_value", "focus=" + bs.getJumpToValue())); + log.info(LogHelper.getHeader(context, "browse_by_value", "focus=" + bs.getJumpToValue())); try { // get the table name that we are going to be getting our data from // this is the distinct table constrained to either community or collection dao.setTable(browseIndex.getDistinctTableName()); - dao.setStartsWith(StringUtils.lowerCase(scope.getStartsWith())); + dao.setStartsWith(normalizeJumpToValue(scope.getStartsWith())); // remind the DAO that this is a distinct value browse, so it knows what sort // of query to build dao.setDistinct(true); @@ -409,12 +413,12 @@ private BrowseInfo browseByValue(BrowserScope bs) Collection col = (Collection) scope.getBrowseContainer(); dao.setContainerTable("collection2item"); dao.setContainerIDField("collection_id"); - dao.setContainerID(col.getID()); + dao.setContainer(col); } else if (scope.inCommunity()) { Community com = (Community) scope.getBrowseContainer(); dao.setContainerTable("communities2item"); dao.setContainerIDField("community_id"); - dao.setContainerID(com.getID()); + dao.setContainer(com); } } @@ -451,7 +455,7 @@ private BrowseInfo browseByValue(BrowserScope bs) // now, if we don't have any results, we are at the end of the browse. This will // be because a starts_with value has been supplied for which we don't have // any items. 
- if (results.size() == 0) { + if (results.isEmpty()) { // In this case, we will calculate a new offset for the last page of results offset = total - scope.getResultsPerPage(); if (offset < 0) { @@ -464,7 +468,7 @@ private BrowseInfo browseByValue(BrowserScope bs) } } else { // No records, so make an empty list - results = new ArrayList(); + results = new ArrayList<>(); } // construct the BrowseInfo object to pass back @@ -518,17 +522,17 @@ private BrowseInfo browseByValue(BrowserScope bs) */ private String getJumpToValue() throws BrowseException { - log.debug(LogManager.getHeader(context, "get_focus_value", "")); + log.debug(LogHelper.getHeader(context, "get_focus_value", "")); // if the focus is by value, just return it if (scope.hasJumpToValue()) { - log.debug(LogManager.getHeader(context, "get_focus_value_return", "return=" + scope.getJumpToValue())); + log.debug(LogHelper.getHeader(context, "get_focus_value_return", "return=" + scope.getJumpToValue())); return scope.getJumpToValue(); } // if the focus is to start with, then we need to return the value of the starts with if (scope.hasStartsWith()) { - log.debug(LogManager.getHeader(context, "get_focus_value_return", "return=" + scope.getStartsWith())); + log.debug(LogHelper.getHeader(context, "get_focus_value_return", "return=" + scope.getStartsWith())); return scope.getStartsWith(); } @@ -555,7 +559,7 @@ private String getJumpToValue() } String col = "sort_1"; - if (so.getNumber() > 0) { + if (so != null && so.getNumber() > 0) { col = "sort_" + Integer.toString(so.getNumber()); } @@ -565,7 +569,7 @@ private String getJumpToValue() // item (I think) String max = dao.doMaxQuery(col, tableName, id); - log.debug(LogManager.getHeader(context, "get_focus_value_return", "return=" + max)); + log.debug(LogHelper.getHeader(context, "get_focus_value_return", "return=" + max)); return max; } @@ -592,7 +596,7 @@ private int getOffsetForValue(String value) } String col = "sort_1"; - if (so.getNumber() > 0) { + if (so != 
null && so.getNumber() > 0) { col = "sort_" + Integer.toString(so.getNumber()); } @@ -671,7 +675,7 @@ private int getTotalResults() */ private int getTotalResults(boolean distinct) throws SQLException, BrowseException { - log.debug(LogManager.getHeader(context, "get_total_results", "distinct=" + distinct)); + log.debug(LogHelper.getHeader(context, "get_total_results", "distinct=" + distinct)); // tell the browse query whether we are distinct dao.setDistinct(distinct); @@ -685,13 +689,11 @@ private int getTotalResults(boolean distinct) // our count, storing them locally to reinstate later String focusField = dao.getJumpToField(); String focusValue = dao.getJumpToValue(); - String orderField = dao.getOrderField(); int limit = dao.getLimit(); int offset = dao.getOffset(); dao.setJumpToField(null); dao.setJumpToValue(null); - dao.setOrderField(null); dao.setLimit(-1); dao.setOffset(-1); @@ -701,12 +703,11 @@ private int getTotalResults(boolean distinct) // now put back the values we removed for this method dao.setJumpToField(focusField); dao.setJumpToValue(focusValue); - dao.setOrderField(orderField); dao.setLimit(limit); dao.setOffset(offset); dao.setCountValues(null); - log.debug(LogManager.getHeader(context, "get_total_results_return", "return=" + count)); + log.debug(LogHelper.getHeader(context, "get_total_results_return", "return=" + count)); return count; } diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 859063272a7c..6c38c8dd664b 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ * This class holds all the information about a specifically configured * BrowseIndex. It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. 
+ * tables that hold all the information are actually called. Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -102,7 +104,7 @@ private BrowseIndex() { * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { try { number = -1; tableBaseName = baseName; @@ -313,14 +315,6 @@ public String getName() { return name; } - /** - * @param name The name to set. - */ -// public void setName(String name) -// { -// this.name = name; -// } - /** * Get the SortOption associated with this index. * diff --git a/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java b/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java index aa30862e3c34..ec4cb199ea1d 100644 --- a/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java +++ b/dspace-api/src/main/java/org/dspace/browse/CrossLinks.java @@ -59,7 +59,16 @@ public CrossLinks() * @return true/false */ public boolean hasLink(String metadata) { - return links.containsKey(metadata); + return findLinkType(metadata) != null; + } + + /** + * Is there a link for the given browse name (eg 'author') + * @param browseIndexName + * @return true/false + */ + public boolean hasBrowseName(String browseIndexName) { + return links.containsValue(browseIndexName); } /** @@ -69,6 +78,41 @@ public boolean hasLink(String metadata) { * @return type */ public String getLinkType(String metadata) { - return links.get(metadata); + return findLinkType(metadata); + } + + /** + * Get full map of field->indexname link configurations + * @return + */ + public Map getLinks() { + return links; + } + + /** + * Find and return the browse 
name for a given metadata field. + * If the link key contains a wildcard eg dc.subject.*, it should + * match dc.subject.other, etc. + * @param metadata + * @return + */ + public String findLinkType(String metadata) { + // Resolve wildcards properly, eg. dc.subject.other matches a configuration for dc.subject.* + for (String key : links.keySet()) { + if (null != key && key.endsWith(".*")) { + // A substring of length-1, also substracting the wildcard should work as a "startsWith" + // check for the field eg. dc.subject.* -> dc.subject is the start of dc.subject.other + if (null != metadata && metadata.startsWith(key.substring(0, key.length() - 1 - ".*".length()))) { + return links.get(key); + } + } else { + // Exact match, if the key field has no .* wildcard + if (links.containsKey(metadata)) { + return links.get(metadata); + } + } + } + // No match + return null; } } diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java index c9c140fb0b5b..20c43fc37298 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemCounter.java @@ -18,6 +18,7 @@ import org.dspace.core.Context; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.web.ContextUtil; /** * This class provides a standard interface to all item counting @@ -49,9 +50,20 @@ public class ItemCounter { */ private Context context; + /** + * This field is used to hold singular instance of a class. + * Singleton pattern is used but this class should be + * refactored to modern DSpace approach (injectible service). 
+ */ + + private static ItemCounter instance; + protected ItemService itemService; protected ConfigurationService configurationService; + private boolean showStrengths; + private boolean useCache; + /** * Construct a new item counter which will use the given DSpace Context * @@ -63,21 +75,42 @@ public ItemCounter(Context context) throws ItemCountException { this.dao = ItemCountDAOFactory.getInstance(this.context); this.itemService = ContentServiceFactory.getInstance().getItemService(); this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.showStrengths = configurationService.getBooleanProperty("webui.strengths.show", false); + this.useCache = configurationService.getBooleanProperty("webui.strengths.cache", true); } /** - * Get the count of the items in the given container. If the configuration - * value webui.strengths.cache is equal to 'true' this will return the - * cached value if it exists. If it is equal to 'false' it will count - * the number of items in the container in real time. + * Get the singular instance of a class. + * It creates a new instance at the first usage of this method. + * + * @return instance af a class + * @throws ItemCountException when error occurs + */ + public static ItemCounter getInstance() throws ItemCountException { + if (instance == null) { + instance = new ItemCounter(ContextUtil.obtainCurrentRequestContext()); + } + return instance; + } + + /** + * Get the count of the items in the given container. If the configuration + * value webui.strengths.show is equal to 'true' this method will return all + * archived items. If the configuration value webui.strengths.show is equal to + * 'false' this method will return -1. + * If the configuration value webui.strengths.cache + * is equal to 'true' this will return the cached value if it exists. + * If it is equal to 'false' it will count the number of items + * in the container in real time. 
* * @param dso DSpaceObject * @return count * @throws ItemCountException when error occurs */ public int getCount(DSpaceObject dso) throws ItemCountException { - boolean useCache = configurationService.getBooleanProperty( - "webui.strengths.cache", true); + if (!showStrengths) { + return -1; + } if (useCache) { return dao.getCount(dso); diff --git a/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java b/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java index 9cbbe8f19429..6a63659c82b2 100644 --- a/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java +++ b/dspace-api/src/main/java/org/dspace/browse/ItemListConfig.java @@ -25,22 +25,7 @@ public class ItemListConfig { /** * a map of column number to metadata value */ - private Map metadata = new HashMap(); - - /** - * a map of column number to data type - */ - private Map types = new HashMap(); - - /** - * constant for a DATE column - */ - private static final int DATE = 1; - - /** - * constant for a TEXT column - */ - private static final int TEXT = 2; + private Map metadata = new HashMap<>(); private final transient ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -63,14 +48,11 @@ public ItemListConfig() // parse the config int i = 1; for (String token : browseFields) { - Integer key = Integer.valueOf(i); + Integer key = i; // find out if the field is a date if (token.indexOf("(date)") > 0) { token = token.replaceAll("\\(date\\)", ""); - types.put(key, Integer.valueOf(ItemListConfig.DATE)); - } else { - types.put(key, Integer.valueOf(ItemListConfig.TEXT)); } String[] mdBits = interpretField(token.trim(), null); @@ -100,7 +82,7 @@ public int numCols() { * @return array of metadata */ public String[] getMetadata(int col) { - return metadata.get(Integer.valueOf(col)); + return metadata.get(col); } /** diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java 
b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index 6a960e8d75ea..f99aab852bf5 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -8,17 +8,17 @@ package org.dspace.browse; import java.io.Serializable; -import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; -import java.util.UUID; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.util.ClientUtils; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.discovery.DiscoverFacetField; @@ -30,6 +30,8 @@ import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.services.factory.DSpaceServicesFactory; @@ -123,9 +125,9 @@ public int compare(Object o1, Object o2) { private String containerIDField = null; /** - * the database id of the container we are constraining to + * the container we are constraining to */ - private UUID containerID = null; + private DSpaceObject container = null; /** * the column that we are sorting results by @@ -175,7 +177,7 @@ private DiscoverResult getSolrResponse() throws BrowseException { if (sResponse == null) { DiscoverQuery query = new DiscoverQuery(); addLocationScopeFilter(query); - addStatusFilter(query); + addDefaultFilterQueries(query); if (distinct) { DiscoverFacetField dff; if 
(StringUtils.isNotBlank(startsWith)) { @@ -205,6 +207,10 @@ private DiscoverResult getSolrResponse() throws BrowseException { } else if (valuePartial) { query.addFilterQueries("{!field f=" + facetField + "_partial}" + value); } + if (StringUtils.isNotBlank(startsWith) && orderField != null) { + query.addFilterQueries( + "bi_" + orderField + "_sort:" + ClientUtils.escapeQueryChars(startsWith) + "*"); + } // filter on item to be sure to don't include any other object // indexed in the Discovery Search core query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE); @@ -222,28 +228,21 @@ private DiscoverResult getSolrResponse() throws BrowseException { return sResponse; } - private void addStatusFilter(DiscoverQuery query) { - try { - if (!authorizeService.isAdmin(context) - && (authorizeService.isCommunityAdmin(context) - || authorizeService.isCollectionAdmin(context))) { - query.addFilterQueries(searcher.createLocationQueryForAdministrableItems(context)); - } - } catch (SQLException ex) { - log.error("Error looking up authorization rights of current user", ex); - } - } - private void addLocationScopeFilter(DiscoverQuery query) { - if (containerID != null) { + if (container != null) { if (containerIDField.startsWith("collection")) { - query.addFilterQueries("location.coll:" + containerID); + query.addFilterQueries("location.coll:" + container.getID()); } else if (containerIDField.startsWith("community")) { - query.addFilterQueries("location.comm:" + containerID); + query.addFilterQueries("location.comm:" + container.getID()); } } } + private void addDefaultFilterQueries(DiscoverQuery query) { + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); + discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); + } + @Override public int doCountQuery() throws BrowseException { DiscoverResult resp = getSolrResponse(); @@ -332,7 +331,7 @@ public int doOffsetQuery(String column, String 
value, boolean isAscending) throws BrowseException { DiscoverQuery query = new DiscoverQuery(); addLocationScopeFilter(query); - addStatusFilter(query); + addDefaultFilterQueries(query); query.setMaxResults(0); query.addFilterQueries("search.resourcetype:" + IndexableItem.TYPE); @@ -393,8 +392,8 @@ public void setEnableBrowseFrequencies(boolean enableBrowseFrequencies) { * @see org.dspace.browse.BrowseDAO#getContainerID() */ @Override - public UUID getContainerID() { - return containerID; + public DSpaceObject getContainer() { + return container; } /* @@ -556,8 +555,8 @@ public void setAscending(boolean ascending) { * @see org.dspace.browse.BrowseDAO#setContainerID(int) */ @Override - public void setContainerID(UUID containerID) { - this.containerID = containerID; + public void setContainer(DSpaceObject container) { + this.container = container; } diff --git a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java index 6b16d51bfe1e..a12ac3b98a2e 100644 --- a/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java +++ b/dspace-api/src/main/java/org/dspace/checker/CheckerCommand.java @@ -245,7 +245,7 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { info.setProcessStartDate(new Date()); try { - Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); + Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream()); if (MapUtils.isNotEmpty(checksumMap)) { info.setBitstreamFound(true); if (checksumMap.containsKey("checksum")) { @@ -255,10 +255,16 @@ protected void processBitstream(MostRecentChecksum info) throws SQLException { if (checksumMap.containsKey("checksum_algorithm")) { info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString()); } + + // compare new checksum to previous checksum + info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), 
info.getCurrentChecksum())); + + } else { + info.setCurrentChecksum(""); + info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); + info.setToBeProcessed(false); } - // compare new checksum to previous checksum - info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum())); } catch (IOException e) { // bitstream located, but file missing from asset store info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND)); diff --git a/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java index f8d6560e9246..f7b05d4de9d3 100644 --- a/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/checker/ChecksumHistoryServiceImpl.java @@ -74,7 +74,8 @@ public void addHistory(Context context, MostRecentChecksum mostRecentChecksum) t if (mostRecentChecksum.getBitstream().isDeleted()) { checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.BITSTREAM_MARKED_DELETED); } else { - checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.CHECKSUM_MATCH); + checksumResult = checksumResultService.findByCode(context, + mostRecentChecksum.getChecksumResult().getResultCode()); } checksumHistory.setResult(checksumResult); diff --git a/dspace-api/src/main/java/org/dspace/checker/MostRecentChecksum.java b/dspace-api/src/main/java/org/dspace/checker/MostRecentChecksum.java index 5962d19f68c4..eff8a8be1cde 100644 --- a/dspace-api/src/main/java/org/dspace/checker/MostRecentChecksum.java +++ b/dspace-api/src/main/java/org/dspace/checker/MostRecentChecksum.java @@ -170,7 +170,7 @@ public boolean equals(Object o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (o == null || !(o instanceof MostRecentChecksum)) { return false; } diff --git 
a/dspace-api/src/main/java/org/dspace/checker/ResultsLogger.java b/dspace-api/src/main/java/org/dspace/checker/ResultsLogger.java index 358d0c4018f3..f95778c4a8b0 100644 --- a/dspace-api/src/main/java/org/dspace/checker/ResultsLogger.java +++ b/dspace-api/src/main/java/org/dspace/checker/ResultsLogger.java @@ -109,7 +109,7 @@ public void collect(Context context, MostRecentChecksum info) throws SQLExceptio "unknown")); LOG.info(msg("new-checksum") + ": " + info.getCurrentChecksum()); LOG.info(msg("checksum-comparison-result") + ": " - + (info.getChecksumResult().getResultCode())); + + info.getChecksumResult().getResultCode()); LOG.info("\n\n"); } } diff --git a/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java b/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java index 849cddfb61c7..ddefb28e1b57 100644 --- a/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/checker/SimpleReporterServiceImpl.java @@ -77,7 +77,7 @@ public int getDeletedBitstreamReport(Context context, Date startDate, Date endDa osw.write(applyDateFormatShort(endDate)); osw.write("\n\n\n"); - if (recentChecksums.size() == 0) { + if (recentChecksums.isEmpty()) { osw.write("\n\n"); osw.write(msg("no-bitstreams-to-delete")); osw.write("\n"); @@ -119,7 +119,7 @@ public int getChangedChecksumReport(Context context, Date startDate, Date endDat osw.write(applyDateFormatShort(endDate)); osw.write("\n\n\n"); - if (history.size() == 0) { + if (history.isEmpty()) { osw.write("\n\n"); osw.write(msg("no-changed-bitstreams")); osw.write("\n"); @@ -152,6 +152,7 @@ public int getBitstreamNotFoundReport(Context context, Date startDate, Date endD osw.write("\n"); osw.write(msg("bitstream-not-found-report")); + osw.write(" "); osw.write(applyDateFormatShort(startDate)); osw.write(" "); osw.write(msg("date-range-to")); @@ -159,7 +160,7 @@ public int getBitstreamNotFoundReport(Context context, Date startDate, 
Date endD osw.write(applyDateFormatShort(endDate)); osw.write("\n\n\n"); - if (history.size() == 0) { + if (history.isEmpty()) { osw.write("\n\n"); osw.write(msg("no-bitstreams-changed")); osw.write("\n"); @@ -201,7 +202,7 @@ public int getNotToBeProcessedReport(Context context, Date startDate, Date endDa osw.write(applyDateFormatShort(endDate)); osw.write("\n\n\n"); - if (mostRecentChecksums.size() == 0) { + if (mostRecentChecksums.isEmpty()) { osw.write("\n\n"); osw.write(msg("no-bitstreams-to-no-longer-be-processed")); osw.write("\n"); @@ -230,10 +231,11 @@ public int getUncheckedBitstreamsReport(Context context, OutputStreamWriter osw) osw.write("\n"); osw.write(msg("unchecked-bitstream-report")); + osw.write(" "); osw.write(applyDateFormatShort(new Date())); osw.write("\n\n\n"); - if (bitstreams.size() == 0) { + if (bitstreams.isEmpty()) { osw.write("\n\n"); osw.write(msg("no-unchecked-bitstreams")); osw.write("\n"); @@ -257,7 +259,7 @@ public int getUncheckedBitstreamsReport(Context context, OutputStreamWriter osw) protected void printHistoryRecords(List mostRecentChecksums, OutputStreamWriter osw) throws IOException { for (MostRecentChecksum mostRecentChecksum : mostRecentChecksums) { - StringBuffer buf = new StringBuffer(1000); + StringBuilder buf = new StringBuilder(1000); buf.append("------------------------------------------------ \n"); buf.append(msg("bitstream-id")).append(" = ").append( mostRecentChecksum.getBitstream().getID()).append("\n"); @@ -292,7 +294,7 @@ protected void printDSpaceInfoRecords(Context context, List bitstream throws IOException, SQLException { for (Bitstream info : bitstreams) { - StringBuffer buf = new StringBuffer(1000); + StringBuilder buf = new StringBuilder(1000); buf.append("------------------------------------------------ \n"); buf.append(msg("format-id")).append(" = ").append( info.getFormat(context).getID()).append("\n"); diff --git a/dspace-api/src/main/java/org/dspace/checker/dao/impl/ChecksumResultDAOImpl.java 
b/dspace-api/src/main/java/org/dspace/checker/dao/impl/ChecksumResultDAOImpl.java index a82b904b28e4..7552c6d5bb8f 100644 --- a/dspace-api/src/main/java/org/dspace/checker/dao/impl/ChecksumResultDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/checker/dao/impl/ChecksumResultDAOImpl.java @@ -21,7 +21,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the ChecksumResult object. - * This class is responsible for all database calls for the ChecksumResult object and is autowired by spring + * This class is responsible for all database calls for the ChecksumResult object and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -39,6 +39,6 @@ public ChecksumResult findByCode(Context context, ChecksumResultCode code) throw Root checksumResultRoot = criteriaQuery.from(ChecksumResult.class); criteriaQuery.select(checksumResultRoot); criteriaQuery.where(criteriaBuilder.equal(checksumResultRoot.get(ChecksumResult_.resultCode), code)); - return uniqueResult(context, criteriaQuery, false, ChecksumResult.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, ChecksumResult.class); } } diff --git a/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java b/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java index 66ce666b9d6d..a31e02cbab4a 100644 --- a/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/checker/dao/impl/MostRecentChecksumDAOImpl.java @@ -92,8 +92,8 @@ public List findByResultTypeInDateRange(Context context, Dat criteriaQuery.where(criteriaBuilder.and( criteriaBuilder.equal(mostRecentResult.get(ChecksumResult_.resultCode), resultCode), criteriaBuilder.lessThanOrEqualTo( - mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate), - 
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate) + mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate), + criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate) ) ); List orderList = new LinkedList<>(); diff --git a/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java new file mode 100644 index 000000000000..afd74a588d17 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/cli/DSpaceSkipUnknownArgumentsParser.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.cli; + +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; + +/** + * Extended version of the DefaultParser. This parser skip/ignore unknown arguments. + */ +public class DSpaceSkipUnknownArgumentsParser extends DefaultParser { + + + @Override + public CommandLine parse(Options options, String[] arguments) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments)); + } + + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties); + } + + /** + * Parse the arguments according to the specified options and properties. 
+ * @param options the specified Options + * @param arguments the command line arguments + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. + */ + @Override + public CommandLine parse(Options options, String[] arguments, boolean stopAtNonOption) throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), stopAtNonOption); + } + + /** + * Parse the arguments according to the specified options and properties. + * @param options the specified Options + * @param arguments the command line arguments + * @param properties command line option name-value pairs + * @param stopAtNonOption can be ignored - an unrecognized argument is ignored, an unrecognized argument doesn't + * stop the parsing and doesn't trigger a ParseException + * + * @return the list of atomic option and value tokens + * @throws ParseException if there are any problems encountered while parsing the command line tokens. 
+ */ + @Override + public CommandLine parse(Options options, String[] arguments, Properties properties, boolean stopAtNonOption) + throws ParseException { + return super.parse(options, getOnlyKnownArguments(options, arguments), properties, stopAtNonOption); + } + + + private String[] getOnlyKnownArguments(Options options, String[] arguments) { + List knownArguments = new ArrayList<>(); + for (String arg : arguments) { + if (options.hasOption(arg)) { + knownArguments.add(arg); + } + } + return knownArguments.toArray(new String[0]); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/Bitstream.java b/dspace-api/src/main/java/org/dspace/content/Bitstream.java index b290ae66d68e..451a3b75784d 100644 --- a/dspace-api/src/main/java/org/dspace/content/Bitstream.java +++ b/dspace-api/src/main/java/org/dspace/content/Bitstream.java @@ -21,8 +21,6 @@ import javax.persistence.Table; import javax.persistence.Transient; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamService; import org.dspace.core.Constants; @@ -37,17 +35,10 @@ * the contents of a bitstream; you need to create a new bitstream. 
* * @author Robert Tansley - * @version $Revision$ */ @Entity @Table(name = "bitstream") public class Bitstream extends DSpaceObject implements DSpaceObjectLegacySupport { - - /** - * log4j logger - */ - private static final Logger log = LogManager.getLogger(); - @Column(name = "bitstream_id", insertable = false, updatable = false) private Integer legacyId; @@ -412,7 +403,7 @@ private BitstreamService getBitstreamService() { */ @Override public boolean equals(Object other) { - if (other == null) { + if (!(other instanceof Bitstream)) { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other); @@ -420,11 +411,7 @@ public boolean equals(Object other) { return false; } final Bitstream otherBitstream = (Bitstream) other; - if (!this.getID().equals(otherBitstream.getID())) { - return false; - } - - return true; + return this.getID().equals(otherBitstream.getID()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamFormat.java b/dspace-api/src/main/java/org/dspace/content/BitstreamFormat.java index d543cc7c6ed5..6d64ee3073e9 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamFormat.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamFormat.java @@ -9,7 +9,7 @@ import java.io.Serializable; import java.sql.SQLException; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import javax.persistence.CollectionTable; import javax.persistence.Column; @@ -40,7 +40,6 @@ * when update is called. 
* * @author Robert Tansley - * @version $Revision$ */ @Entity @Table(name = "bitstreamformatregistry") @@ -112,7 +111,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity * {@link org.dspace.content.service.BitstreamFormatService#create(Context)} */ protected BitstreamFormat() { - fileExtensions = new LinkedList<>(); + fileExtensions = new ArrayList<>(); } /** @@ -120,6 +119,7 @@ protected BitstreamFormat() { * * @return the internal identifier */ + @Override public final Integer getID() { return id; } @@ -267,7 +267,7 @@ private BitstreamFormatService getBitstreamFormatService() { */ @Override public boolean equals(Object other) { - if (other == null) { + if (!(other instanceof BitstreamFormat)) { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other); @@ -275,11 +275,7 @@ public boolean equals(Object other) { return false; } final BitstreamFormat otherBitstreamFormat = (BitstreamFormat) other; - if (!this.getID().equals(otherBitstreamFormat.getID())) { - return false; - } - - return true; + return this.getID().equals(otherBitstreamFormat.getID()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java index 89bf74ece6b9..fa5932ded1f7 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamFormatServiceImpl.java @@ -18,7 +18,7 @@ import org.dspace.content.dao.BitstreamFormatDAO; import org.dspace.content.service.BitstreamFormatService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.springframework.beans.factory.annotation.Autowired; /** @@ -68,7 +68,7 @@ public BitstreamFormat find(Context context, int id) if (bitstreamFormat == null) { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, + 
log.debug(LogHelper.getHeader(context, "find_bitstream_format", "not_found,bitstream_format_id=" + id)); } @@ -78,7 +78,7 @@ public BitstreamFormat find(Context context, int id) // not null, return format object if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_bitstream_format", + log.debug(LogHelper.getHeader(context, "find_bitstream_format", "bitstream_format_id=" + id)); } @@ -129,7 +129,7 @@ public BitstreamFormat create(Context context) throws SQLException, AuthorizeExc BitstreamFormat bitstreamFormat = bitstreamFormatDAO.create(context, new BitstreamFormat()); - log.info(LogManager.getHeader(context, "create_bitstream_format", + log.info(LogHelper.getHeader(context, "create_bitstream_format", "bitstream_format_id=" + bitstreamFormat.getID())); @@ -189,7 +189,7 @@ public void update(Context context, List bitstreamFormats) } for (BitstreamFormat bitstreamFormat : bitstreamFormats) { - log.info(LogManager.getHeader(context, "update_bitstream_format", + log.info(LogHelper.getHeader(context, "update_bitstream_format", "bitstream_format_id=" + bitstreamFormat.getID())); bitstreamFormatDAO.save(context, bitstreamFormat); @@ -218,7 +218,7 @@ public void delete(Context context, BitstreamFormat bitstreamFormat) throws SQLE // Delete this format from database bitstreamFormatDAO.delete(context, bitstreamFormat); - log.info(LogManager.getHeader(context, "delete_bitstream_format", + log.info(LogHelper.getHeader(context, "delete_bitstream_format", "bitstream_format_id=" + bitstreamFormat.getID() + ",bitstreams_changed=" + numberChanged)); } diff --git a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java index 98760a43fe0a..e23e5ce2c825 100644 --- a/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BitstreamServiceImpl.java @@ -28,7 +28,7 @@ import org.dspace.content.service.ItemService; 
import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.event.Event; import org.dspace.storage.bitstore.service.BitstreamStorageService; import org.springframework.beans.factory.annotation.Autowired; @@ -45,7 +45,8 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl imp /** * log4j logger */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamServiceImpl.class); + private static final Logger log + = org.apache.logging.log4j.LogManager.getLogger(); @Autowired(required = true) @@ -73,7 +74,7 @@ public Bitstream find(Context context, UUID id) throws SQLException { if (bitstream == null) { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_bitstream", + log.debug(LogHelper.getHeader(context, "find_bitstream", "not_found,bitstream_id=" + id)); } @@ -82,7 +83,7 @@ public Bitstream find(Context context, UUID id) throws SQLException { // not null, return Bitstream if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_bitstream", + log.debug(LogHelper.getHeader(context, "find_bitstream", "bitstream_id=" + id)); } @@ -96,7 +97,7 @@ public List findAll(Context context) throws SQLException { @Override public Bitstream clone(Context context, Bitstream bitstream) - throws SQLException { + throws SQLException, AuthorizeException { // Create a new bitstream with a new ID. 
Bitstream clonedBitstream = bitstreamDAO.create(context, new Bitstream()); // Set the internal identifier, file size, checksum, and @@ -106,18 +107,7 @@ public Bitstream clone(Context context, Bitstream bitstream) clonedBitstream.setChecksum(bitstream.getChecksum()); clonedBitstream.setChecksumAlgorithm(bitstream.getChecksumAlgorithm()); clonedBitstream.setFormat(bitstream.getBitstreamFormat()); - - try { - //Update our bitstream but turn off the authorization system since permissions - //haven't been set at this point in time. - context.turnOffAuthorisationSystem(); - update(context, clonedBitstream); - } catch (AuthorizeException e) { - log.error(e); - //Can never happen since we turn off authorization before we update - } finally { - context.restoreAuthSystemState(); - } + update(context, clonedBitstream); return clonedBitstream; } @@ -131,7 +121,7 @@ public Bitstream create(Context context, InputStream is) throws IOException, SQL // Store the bits UUID bitstreamID = bitstreamStorageService.store(context, bitstreamDAO.create(context, new Bitstream()), is); - log.info(LogManager.getHeader(context, "create_bitstream", + log.info(LogHelper.getHeader(context, "create_bitstream", "bitstream_id=" + bitstreamID)); // Set the format to "unknown" @@ -191,7 +181,7 @@ public Bitstream register(Context context, bitstreamStorageService.register( context, bitstream, assetstore, bitstreamPath); - log.info(LogManager.getHeader(context, + log.info(LogHelper.getHeader(context, "create_bitstream", "bitstream_id=" + bitstream.getID())); @@ -248,7 +238,7 @@ public void update(Context context, Bitstream bitstream) throws SQLException, Au // Check authorisation authorizeService.authorizeAction(context, bitstream, Constants.WRITE); - log.info(LogManager.getHeader(context, "update_bitstream", + log.info(LogHelper.getHeader(context, "update_bitstream", "bitstream_id=" + bitstream.getID())); super.update(context, bitstream); if (bitstream.isModified()) { @@ -273,7 +263,7 @@ public void 
delete(Context context, Bitstream bitstream) throws SQLException, Au // changed to a check on delete // Check authorisation authorizeService.authorizeAction(context, bitstream, Constants.DELETE); - log.info(LogManager.getHeader(context, "delete_bitstream", + log.info(LogHelper.getHeader(context, "delete_bitstream", "bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, bitstream.getID(), @@ -286,6 +276,11 @@ public void delete(Context context, Bitstream bitstream) throws SQLException, Au //Remove our bitstream from all our bundles final List bundles = bitstream.getBundles(); for (Bundle bundle : bundles) { + authorizeService.authorizeAction(context, bundle, Constants.REMOVE); + //We also need to remove the bitstream id when it's set as bundle's primary bitstream + if (bitstream.equals(bundle.getPrimaryBitstream())) { + bundle.unsetPrimaryBitstreamID(); + } bundle.removeBitstream(bitstream); } @@ -342,15 +337,16 @@ public void updateLastModified(Context context, Bitstream bitstream) { } @Override - public List findDeletedBitstreams(Context context) throws SQLException { - return bitstreamDAO.findDeletedBitstreams(context); + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException { + return bitstreamDAO.findDeletedBitstreams(context, limit, offset); } @Override public void expunge(Context context, Bitstream bitstream) throws SQLException, AuthorizeException { authorizeService.authorizeAction(context, bitstream, Constants.DELETE); if (!bitstream.isDeleted()) { - throw new IllegalStateException("Bitstream must be deleted before it can be removed from the database"); + throw new IllegalStateException("Bitstream " + bitstream.getID().toString() + + " must be deleted before it can be removed from the database."); } bitstreamDAO.delete(context, bitstream); } @@ -412,7 +408,7 @@ public Bitstream getFirstBitstream(Item item, String bundleName) throws SQLExcep @Override public Bitstream 
getThumbnail(Context context, Bitstream bitstream) throws SQLException { - Pattern pattern = Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + Pattern pattern = getBitstreamNamePattern(bitstream); for (Bundle bundle : bitstream.getBundles()) { for (Item item : bundle.getItems()) { @@ -429,6 +425,13 @@ public Bitstream getThumbnail(Context context, Bitstream bitstream) throws SQLEx return null; } + protected Pattern getBitstreamNamePattern(Bitstream bitstream) { + if (bitstream.getName() != null) { + return Pattern.compile("^" + Pattern.quote(bitstream.getName()) + ".([^.]+)$"); + } + return Pattern.compile("^" + bitstream.getName() + ".([^.]+)$"); + } + @Override public BitstreamFormat getFormat(Context context, Bitstream bitstream) throws SQLException { if (bitstream.getBitstreamFormat() == null) { @@ -455,10 +458,15 @@ public int countTotal(Context context) throws SQLException { @Override public Bitstream findByIdOrLegacyId(Context context, String id) throws SQLException { - if (StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUID.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } diff --git a/dspace-api/src/main/java/org/dspace/content/Bundle.java b/dspace-api/src/main/java/org/dspace/content/Bundle.java index 88f21c2c2f67..e5cbdb6ff244 100644 --- a/dspace-api/src/main/java/org/dspace/content/Bundle.java +++ b/dspace-api/src/main/java/org/dspace/content/Bundle.java @@ -9,7 +9,6 @@ import java.sql.SQLException; import java.util.ArrayList; -import java.util.LinkedList; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; @@ -127,7 +126,7 @@ public void setPrimaryBitstreamID(Bitstream bitstream) { * Unset the primary bitstream 
ID of the bundle */ public void unsetPrimaryBitstreamID() { - primaryBitstream = null; + setPrimaryBitstreamID(null); } /** @@ -138,7 +137,7 @@ public void unsetPrimaryBitstreamID() { * @return the bitstreams */ public List getBitstreams() { - List bitstreamList = new LinkedList<>(this.bitstreams); + List bitstreamList = new ArrayList<>(this.bitstreams); return bitstreamList; } @@ -191,7 +190,7 @@ void addItem(Item item) { @Override public boolean equals(Object obj) { - if (obj == null) { + if (obj == null || !(obj instanceof Bundle)) { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); @@ -202,10 +201,7 @@ public boolean equals(Object obj) { if (this.getType() != other.getType()) { return false; } - if (!this.getID().equals(other.getID())) { - return false; - } - return true; + return this.getID().equals(other.getID()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 7605b6f399f9..3ba90c8cc2ae 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.dspace.core.Constants.ADD; +import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.REMOVE; import static org.dspace.core.Constants.WRITE; @@ -33,7 +34,8 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; +import org.dspace.eperson.Group; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -73,15 +75,15 @@ public Bundle find(Context context, UUID id) throws SQLException { Bundle bundle = bundleDAO.findByID(context, Bundle.class, id); if (bundle == null) { if (log.isDebugEnabled()) { - 
log.debug(LogManager.getHeader(context, "find_bundle", - "not_found,bundle_id=" + id)); + log.debug(LogHelper.getHeader(context, "find_bundle", + "not_found,bundle_id=" + id)); } return null; } else { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_bundle", - "bundle_id=" + id)); + log.debug(LogHelper.getHeader(context, "find_bundle", + "bundle_id=" + id)); } return bundle; @@ -105,8 +107,8 @@ public Bundle create(Context context, Item item, String name) throws SQLExceptio } - log.info(LogManager.getHeader(context, "create_bundle", "bundle_id=" - + bundle.getID())); + log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id=" + + bundle.getID())); // if we ever use the identifier service for bundles, we should // create the bundle before we create the Event and should add all @@ -132,12 +134,12 @@ public Bitstream getBitstreamByName(Bundle bundle, String name) { @Override public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.ADD); - log.info(LogManager.getHeader(context, "add_bitstream", "bundle_id=" - + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id=" + + bundle.getID() + ",bitstream_id=" + bitstream.getID())); // First check that the bitstream isn't already in the list List bitstreams = bundle.getBitstreams(); @@ -158,32 +160,69 @@ public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) } bundle.addBitstream(bitstream); + // If a bitstream is moved from one bundle to another it may be temporarily flagged as deleted + // (when removed from the original bundle) + if (bitstream.isDeleted()) { + bitstream.setDeleted(false); + } bitstream.getBundles().add(bundle); context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(), - 
Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); + // The next logic is a bit overly cautious but ensures that if there are any future start dates + // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection + // just in case. In practice, the item install process would overwrite these anyway but it may satisfy + // some other bitstream creation methods and integration tests + boolean isEmbargoed = false; + for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) { + if (!resourcePolicyService.isDateValid(resourcePolicy)) { + isEmbargoed = true; + break; + } + } + if (owningItem != null && !isEmbargoed) { + // Resolve owning collection + Collection owningCollection = owningItem.getOwningCollection(); + if (owningCollection != null) { + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + authorizeService.getAuthorizedGroups(context, owningCollection, + Constants.DEFAULT_BITSTREAM_READ); + // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy + // inherited from the bundle with this policy. 
+ if (!defaultBitstreamReadGroups.isEmpty()) { + // Remove read policies from the bitstream + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) { + // Inherit this policy as READ, directly from the collection roles + authorizeService.addPolicy(context, bitstream, + Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED); + } + } + } + } bitstreamService.update(context, bitstream); } @Override public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws AuthorizeException, SQLException, IOException { + throws AuthorizeException, SQLException, IOException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.REMOVE); - log.info(LogManager.getHeader(context, "remove_bitstream", - "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + log.info(LogHelper.getHeader(context, "remove_bitstream", + "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); //Ensure that the last modified from the item is triggered ! 
Item owningItem = (Item) getParentObject(context, bundle); @@ -216,9 +255,9 @@ public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) @Override public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List policies = authorizeService.getPoliciesActionFilter(context, collection, - Constants.DEFAULT_BITSTREAM_READ); + Constants.DEFAULT_BITSTREAM_READ); // change the action to just READ // just don't call update on the resourcepolicies!!! @@ -226,7 +265,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col if (!i.hasNext()) { throw new java.sql.SQLException("Collection " + collection.getID() - + " has no default bitstream READ policies"); + + " has no default bitstream READ policies"); } List newPolicies = new ArrayList(); @@ -241,7 +280,7 @@ public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Col @Override public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List newpolicies) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List bitstreams = bundle.getBitstreams(); if (CollectionUtils.isNotEmpty(bitstreams)) { for (Bitstream bs : bitstreams) { @@ -362,17 +401,17 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws // If we have an invalid Bitstream ID, just ignore it, but log a warning if (bitstream == null) { //This should never occur but just in case - log.warn(LogManager.getHeader(context, "Invalid bitstream id while changing bitstream order", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); continue; } // If we have a Bitstream not in the current list, log a warning & exit immediately if 
(!currentBitstreams.contains(bitstream)) { - log.warn(LogManager.getHeader(context, - "Encountered a bitstream not in this bundle while changing bitstream " + - "order. Bitstream order will not be changed.", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + log.warn(LogHelper.getHeader(context, + "Encountered a bitstream not in this bundle while changing bitstream " + + "order. Bitstream order will not be changed.", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); return; } updatedBitstreams.add(bitstream); @@ -380,10 +419,10 @@ public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws // If our lists are different sizes, exit immediately if (updatedBitstreams.size() != currentBitstreams.size()) { - log.warn(LogManager.getHeader(context, - "Size of old list and new list do not match. Bitstream order will not be " + - "changed.", - "Bundle: " + bundle.getID())); + log.warn(LogHelper.getHeader(context, + "Size of old list and new list do not match. 
Bitstream order will not be " + + "changed.", + "Bundle: " + bundle.getID())); return; } @@ -429,7 +468,7 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamDeletion()) { + .canCommunityAdminPerformBitstreamDeletion()) { adminObject = community; } break; @@ -437,10 +476,10 @@ public DSpaceObject getAdminObject(Context context, Bundle bundle, int action) t if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) { adminObject = item; } else if (AuthorizeConfiguration - .canCollectionAdminPerformBitstreamCreation()) { + .canCollectionAdminPerformBitstreamCreation()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamCreation()) { + .canCommunityAdminPerformBitstreamCreation()) { adminObject = community; } break; @@ -471,8 +510,8 @@ public void updateLastModified(Context context, Bundle dso) { public void update(Context context, Bundle bundle) throws SQLException, AuthorizeException { // Check authorisation //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE); - log.info(LogManager.getHeader(context, "update_bundle", "bundle_id=" - + bundle.getID())); + log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id=" + + bundle.getID())); super.update(context, bundle); bundleDAO.save(context, bundle); @@ -480,10 +519,10 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz if (bundle.isModified() || bundle.isMetadataModified()) { if (bundle.isMetadataModified()) { context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(), - getIdentifiers(context, bundle))); + getIdentifiers(context, bundle))); } context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(), - null, getIdentifiers(context, 
bundle))); + null, getIdentifiers(context, bundle))); bundle.clearModified(); bundle.clearDetails(); } @@ -491,13 +530,13 @@ public void update(Context context, Bundle bundle) throws SQLException, Authoriz @Override public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException { - log.info(LogManager.getHeader(context, "delete_bundle", "bundle_id=" - + bundle.getID())); + log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id=" + + bundle.getID())); authorizeService.authorizeAction(context, bundle, Constants.DELETE); context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(), - bundle.getName(), getIdentifiers(context, bundle))); + bundle.getName(), getIdentifiers(context, bundle))); // Remove bitstreams List bitstreams = bundle.getBitstreams(); @@ -523,10 +562,15 @@ public int getSupportsTypeConstant() { @Override public Bundle findByIdOrLegacyId(Context context, String id) throws SQLException { - if (StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUID.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } diff --git a/dspace-api/src/main/java/org/dspace/content/Collection.java b/dspace-api/src/main/java/org/dspace/content/Collection.java index 0658cc2d9361..53b63dbef1fa 100644 --- a/dspace-api/src/main/java/org/dspace/content/Collection.java +++ b/dspace-api/src/main/java/org/dspace/content/Collection.java @@ -14,6 +14,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.UUID; import javax.annotation.Nonnull; import javax.persistence.Cacheable; import javax.persistence.CascadeType; @@ -28,6 +29,7 @@ import javax.persistence.Transient; import 
org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; @@ -104,6 +106,16 @@ protected Collection() { } + /** + * Takes a pre-determined UUID to be passed to the object to allow for the + * restoration of previously defined UUID's. + * + * @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator + */ + protected Collection(UUID uuid) { + this.predefinedUUID = uuid; + } + @Override public String getName() { String value = getCollectionService() @@ -325,4 +337,17 @@ private CollectionService getCollectionService() { return collectionService; } + /** + * return count of the collection items + * + * @return int + */ + public int countArchivedItems() { + try { + return collectionService.countArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 380c0336af67..652d2a5f38a0 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.Map; import java.util.MissingResourceException; +import java.util.Objects; import java.util.Set; import java.util.UUID; @@ -30,6 +31,8 @@ import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CollectionDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -39,9 +42,10 @@ import 
org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.service.LicenseService; import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverQuery.SORT_ORDER; import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchService; @@ -129,12 +133,23 @@ public Collection create(Context context, Community community) throws SQLExcepti @Override public Collection create(Context context, Community community, String handle) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { + return create(context, community, handle, null); + } + + @Override + public Collection create(Context context, Community community, + String handle, UUID uuid) throws SQLException, AuthorizeException { if (community == null) { throw new IllegalArgumentException("Community cannot be null when creating a new collection."); } - Collection newCollection = collectionDAO.create(context, new Collection()); + Collection newCollection; + if (uuid != null) { + newCollection = collectionDAO.create(context, new Collection(uuid)); + } else { + newCollection = collectionDAO.create(context, new Collection()); + } //Add our newly created collection to our community, authorization checks occur in THIS method communityService.addCollection(context, community, newCollection); @@ -146,9 +161,10 @@ public Collection create(Context context, Community community, String handle) authorizeService.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.READ, null); // now create the default policies for submitted items authorizeService - .createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_ITEM_READ, null); + .createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_ITEM_READ, null); authorizeService - 
.createResourcePolicy(context, newCollection, anonymousGroup, null, Constants.DEFAULT_BITSTREAM_READ, null); + .createResourcePolicy(context, newCollection, anonymousGroup, null, + Constants.DEFAULT_BITSTREAM_READ, null); collectionDAO.save(context, newCollection); @@ -164,12 +180,12 @@ public Collection create(Context context, Community community, String handle) } context.addEvent(new Event(Event.CREATE, Constants.COLLECTION, - newCollection.getID(), newCollection.getHandle(), - getIdentifiers(context, newCollection))); + newCollection.getID(), newCollection.getHandle(), + getIdentifiers(context, newCollection))); - log.info(LogManager.getHeader(context, "create_collection", - "collection_id=" + newCollection.getID()) - + ",handle=" + newCollection.getHandle()); + log.info(LogHelper.getHeader(context, "create_collection", + "collection_id=" + newCollection.getID()) + + ",handle=" + newCollection.getHandle()); return newCollection; } @@ -318,7 +334,7 @@ public void setMetadataSingleValue(Context context, Collection collection, * whitespace. 
*/ if (value == null) { - clearMetadata(context, collection, field.SCHEMA, field.ELEMENT, field.QUALIFIER, Item.ANY); + clearMetadata(context, collection, field.schema, field.element, field.qualifier, Item.ANY); collection.setMetadataModified(); } else { super.setMetadataSingleValue(context, collection, field, null, value); @@ -345,7 +361,7 @@ public Bitstream setLogo(Context context, Collection collection, InputStream is) if (is == null) { collection.setLogo(null); - log.info(LogManager.getHeader(context, "remove_logo", + log.info(LogHelper.getHeader(context, "remove_logo", "collection_id=" + collection.getID())); } else { Bitstream newLogo = bitstreamService.create(context, is); @@ -357,7 +373,7 @@ public Bitstream setLogo(Context context, Collection collection, InputStream is) .getPoliciesActionFilter(context, collection, Constants.READ); authorizeService.addPolicies(context, policies, newLogo); - log.info(LogManager.getHeader(context, "set_logo", + log.info(LogHelper.getHeader(context, "set_logo", "collection_id=" + collection.getID() + "logo_bitstream_id=" + newLogo.getID())); } @@ -393,7 +409,7 @@ public void setWorkflowGroup(Context context, Collection collection, int step, G try { workflow = workflowFactory.getWorkflow(collection); } catch (WorkflowConfigurationException e) { - log.error(LogManager.getHeader(context, "setWorkflowGroup", + log.error(LogHelper.getHeader(context, "setWorkflowGroup", "collection_id=" + collection.getID() + " " + e.getMessage()), e); } if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) { @@ -524,6 +540,8 @@ public Group createAdministrators(Context context, Collection collection) throws // register this as the admin group collection.setAdmins(admins); + context.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, collection.getID(), + null, getIdentifiers(context, collection))); return admins; } @@ -540,6 +558,8 @@ public void removeAdministrators(Context context, Collection collection) 
throws // Remove the link to the collection table. collection.setAdmins(null); + context.addEvent(new Event(Event.MODIFY, Constants.COLLECTION, collection.getID(), + null, getIdentifiers(context, collection))); } @Override @@ -569,7 +589,7 @@ public void createTemplateItem(Context context, Collection collection) throws SQ Item template = itemService.createTemplateItem(context, collection); collection.setTemplateItem(template); - log.info(LogManager.getHeader(context, "create_template_item", + log.info(LogHelper.getHeader(context, "create_template_item", "collection_id=" + collection.getID() + ",template_item_id=" + template.getID())); } @@ -584,7 +604,7 @@ public void removeTemplateItem(Context context, Collection collection) Item template = collection.getTemplateItem(); if (template != null) { - log.info(LogManager.getHeader(context, "remove_template_item", + log.info(LogHelper.getHeader(context, "remove_template_item", "collection_id=" + collection.getID() + ",template_item_id=" + template.getID())); // temporarily turn off auth system, we have already checked the permission on the top of the method @@ -604,7 +624,7 @@ public void addItem(Context context, Collection collection, Item item) throws SQ // Check authorisation authorizeService.authorizeAction(context, collection, Constants.ADD); - log.info(LogManager.getHeader(context, "add_item", "collection_id=" + log.info(LogHelper.getHeader(context, "add_item", "collection_id=" + collection.getID() + ",item_id=" + item.getID())); // Create mapping @@ -645,7 +665,7 @@ public void update(Context context, Collection collection) throws SQLException, // Check authorisation canEdit(context, collection, true); - log.info(LogManager.getHeader(context, "update_collection", + log.info(LogHelper.getHeader(context, "update_collection", "collection_id=" + collection.getID())); super.update(context, collection); @@ -657,8 +677,11 @@ public void update(Context context, Collection collection) throws SQLException, 
collection.clearModified(); } if (collection.isMetadataModified()) { - collection.clearDetails(); + context.addEvent(new Event(Event.MODIFY_METADATA, Constants.COLLECTION, collection.getID(), + collection.getDetails(),getIdentifiers(context, collection))); + collection.clearModified(); } + collection.clearDetails(); } @Override @@ -702,7 +725,7 @@ public void canEdit(Context context, Collection collection, boolean useInheritan @Override public void delete(Context context, Collection collection) throws SQLException, AuthorizeException, IOException { - log.info(LogManager.getHeader(context, "delete_collection", + log.info(LogHelper.getHeader(context, "delete_collection", "collection_id=" + collection.getID())); // remove harvested collections. @@ -715,7 +738,7 @@ public void delete(Context context, Collection collection) throws SQLException, collection.getID(), collection.getHandle(), getIdentifiers(context, collection))); // remove subscriptions - hmm, should this be in Subscription.java? 
- subscribeService.deleteByCollection(context, collection); + subscribeService.deleteByDspaceObject(context, collection); // Remove Template Item removeTemplateItem(context, collection); @@ -872,10 +895,15 @@ public void updateLastModified(Context context, Collection collection) throws SQ @Override public Collection findByIdOrLegacyId(Context context, String id) throws SQLException { - if (StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUID.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } @@ -900,8 +928,7 @@ public Group createDefaultReadGroup(Context context, Collection collection, Stri int defaultRead) throws SQLException, AuthorizeException { Group role = groupService.create(context); - groupService.setName(role, "COLLECTION_" + collection.getID().toString() + "_" + typeOfGroupString + - "_DEFAULT_READ"); + groupService.setName(role, getDefaultReadGroupName(collection, typeOfGroupString)); // Remove existing privileges from the anonymous group. 
authorizeService.removePoliciesActionFilter(context, collection, defaultRead); @@ -912,6 +939,12 @@ public Group createDefaultReadGroup(Context context, Collection collection, Stri return role; } + @Override + public String getDefaultReadGroupName(Collection collection, String typeOfGroupString) { + return "COLLECTION_" + collection.getID().toString() + "_" + typeOfGroupString + + "_DEFAULT_READ"; + } + @Override public List findCollectionsWithSubmit(String q, Context context, Community community, int offset, int limit) throws SQLException, SearchServiceException { @@ -921,7 +954,8 @@ public List findCollectionsWithSubmit(String q, Context context, Com discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); discoverQuery.setStart(offset); discoverQuery.setMaxResults(limit); - DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,community, q); + discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc); + DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, null, community, q); for (IndexableObject solrCollections : resp.getIndexableObjects()) { Collection c = ((IndexableCollection) solrCollections).getIndexedObject(); collections.add(c); @@ -936,7 +970,7 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.setMaxResults(0); discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); - DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery,community,q); + DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, null, community, q); return (int)resp.getTotalSearchResults(); } @@ -944,9 +978,10 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu * Finds all Indexed Collections where the current user has submit rights. If the user is an Admin, * this is all Indexed Collections. 
Otherwise, it includes those collections where * an indexed "submit" policy lists either the eperson or one of the eperson's groups - * + * * @param context DSpace context * @param discoverQuery + * @param entityType limit the returned collection to those related to given entity type * @param community parent community, could be null * @param q limit the returned collection to those with metadata values matching the query * terms. The terms are used to make also a prefix query on SOLR @@ -956,7 +991,8 @@ public int countCollectionsWithSubmit(String q, Context context, Community commu * @throws SearchServiceException if search error */ private DiscoverResult retrieveCollectionsWithSubmit(Context context, DiscoverQuery discoverQuery, - Community community, String q) throws SQLException, SearchServiceException { + String entityType, Community community, String q) + throws SQLException, SearchServiceException { StringBuilder query = new StringBuilder(); EPerson currentUser = context.getCurrentUser(); @@ -966,6 +1002,7 @@ private DiscoverResult retrieveCollectionsWithSubmit(Context context, DiscoverQu userId = currentUser.getID().toString(); } query.append("submit:(e").append(userId); + Set groups = groupService.allMemberGroupsSet(context, currentUser); for (Group group : groups) { query.append(" OR g").append(group.getID()); @@ -973,16 +1010,134 @@ private DiscoverResult retrieveCollectionsWithSubmit(Context context, DiscoverQu query.append(")"); discoverQuery.addFilterQueries(query.toString()); } - if (community != null) { + if (Objects.nonNull(community)) { discoverQuery.addFilterQueries("location.comm:" + community.getID().toString()); } + if (StringUtils.isNotBlank(entityType)) { + discoverQuery.addFilterQueries("search.entitytype:" + entityType); + } if (StringUtils.isNotBlank(q)) { StringBuilder buildQuery = new StringBuilder(); String escapedQuery = ClientUtils.escapeQueryChars(q); - buildQuery.append(escapedQuery).append(" OR 
").append(escapedQuery).append("*"); + buildQuery.append("(").append(escapedQuery).append(" OR ").append(escapedQuery).append("*").append(")"); discoverQuery.setQuery(buildQuery.toString()); } DiscoverResult resp = searchService.search(context, discoverQuery); return resp; } + + @Override + public Collection retrieveCollectionWithSubmitByEntityType(Context context, Item item, + String entityType) throws SQLException { + Collection ownCollection = item.getOwningCollection(); + return retrieveWithSubmitCollectionByEntityType(context, ownCollection.getCommunities(), entityType); + } + + private Collection retrieveWithSubmitCollectionByEntityType(Context context, List communities, + String entityType) { + + for (Community community : communities) { + Collection collection = retrieveCollectionWithSubmitByCommunityAndEntityType(context, community, + entityType); + if (collection != null) { + return collection; + } + } + + for (Community community : communities) { + List parentCommunities = community.getParentCommunities(); + Collection collection = retrieveWithSubmitCollectionByEntityType(context, parentCommunities, entityType); + if (collection != null) { + return collection; + } + } + + return retrieveCollectionWithSubmitByCommunityAndEntityType(context, null, entityType); + } + + @Override + public Collection retrieveCollectionWithSubmitByCommunityAndEntityType(Context context, Community community, + String entityType) { + context.turnOffAuthorisationSystem(); + List collections; + try { + collections = findCollectionsWithSubmit(null, context, community, entityType, 0, 1); + } catch (SQLException | SearchServiceException e) { + throw new RuntimeException(e); + } + context.restoreAuthSystemState(); + if (collections != null && collections.size() > 0) { + return collections.get(0); + } + if (community != null) { + for (Community subCommunity : community.getSubcommunities()) { + Collection collection = retrieveCollectionWithSubmitByCommunityAndEntityType(context, + 
subCommunity, entityType); + if (collection != null) { + return collection; + } + } + } + return null; + } + + @Override + public List findCollectionsWithSubmit(String q, Context context, Community community, String entityType, + int offset, int limit) throws SQLException, SearchServiceException { + List collections = new ArrayList<>(); + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.setStart(offset); + discoverQuery.setMaxResults(limit); + discoverQuery.setSortField(SOLR_SORT_FIELD, SORT_ORDER.asc); + DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, + entityType, community, q); + for (IndexableObject solrCollections : resp.getIndexableObjects()) { + Collection c = ((IndexableCollection) solrCollections).getIndexedObject(); + collections.add(c); + } + return collections; + } + + @Override + public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) + throws SQLException, SearchServiceException { + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setMaxResults(0); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + DiscoverResult resp = retrieveCollectionsWithSubmit(context, discoverQuery, entityType, community, q); + return (int) resp.getTotalSearchResults(); + } + + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); + + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for (IndexableObject solrCollection : solrIndexableObjects) { + Collection c 
= ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ + @Override + public int countArchivedItems(Collection collection) throws ItemCountException { + return ItemCounter.getInstance().getCount(collection); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/Community.java b/dspace-api/src/main/java/org/dspace/content/Community.java index 810caaf4fdfa..dd6d978936df 100644 --- a/dspace-api/src/main/java/org/dspace/content/Community.java +++ b/dspace-api/src/main/java/org/dspace/content/Community.java @@ -11,6 +11,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.UUID; import javax.persistence.Cacheable; import javax.persistence.CascadeType; import javax.persistence.Column; @@ -24,7 +25,7 @@ import javax.persistence.Transient; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.apache.logging.log4j.Logger; +import org.dspace.browse.ItemCountException; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CommunityService; @@ -42,18 +43,12 @@ * update is called. 
* * @author Robert Tansley - * @version $Revision$ */ @Entity @Table(name = "community") @Cacheable @org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy") public class Community extends DSpaceObject implements DSpaceObjectLegacySupport { - /** - * log4j category - */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(Community.class); - @Column(name = "community_id", insertable = false, updatable = false) private Integer legacyId; @@ -96,6 +91,16 @@ protected Community() { } + /** + * Takes a pre-determined UUID to be passed to the object to allow for the + * restoration of previously defined UUID's. + * + * @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator + */ + protected Community(UUID uuid) { + this.predefinedUUID = uuid; + } + void addSubCommunity(Community subCommunity) { subCommunities.add(subCommunity); setModified(); @@ -215,7 +220,7 @@ public void removeParentCommunity(Community parentCommunity) { */ @Override public boolean equals(Object other) { - if (other == null) { + if (!(other instanceof Community)) { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(other); @@ -223,11 +228,7 @@ public boolean equals(Object other) { return false; } final Community otherCommunity = (Community) other; - if (!this.getID().equals(otherCommunity.getID())) { - return false; - } - - return true; + return this.getID().equals(otherCommunity.getID()); } @Override @@ -264,4 +265,16 @@ private CommunityService getCommunityService() { return communityService; } + /** + * return count of the community items + * + * @return int + */ + public int countArchivedItems() { + try { + return communityService.countArchivedItems(this); + } catch (ItemCountException e) { + throw new RuntimeException(e); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java index 73b1c062fd2e..045adc229e79 100644 --- a/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CommunityServiceImpl.java @@ -24,6 +24,8 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.browse.ItemCountException; +import org.dspace.browse.ItemCounter; import org.dspace.content.dao.CommunityDAO; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.CollectionService; @@ -33,9 +35,10 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.identifier.IdentifierException; import org.dspace.identifier.service.IdentifierService; @@ -73,10 +76,11 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl imp protected SiteService siteService; @Autowired(required = true) protected IdentifierService identifierService; + @Autowired(required = true) + protected SubscribeService subscribeService; protected CommunityServiceImpl() { super(); - } @Override @@ -86,13 +90,24 @@ public Community create(Community parent, Context context) throws SQLException, @Override public Community create(Community parent, Context context, String handle) throws SQLException, AuthorizeException { + return create(parent, context, handle, null); + } + + @Override + public Community create(Community parent, Context context, String handle, + UUID uuid) throws SQLException, AuthorizeException { if (!(authorizeService.isAdmin(context) || - (parent != null && authorizeService.authorizeActionBoolean(context, 
parent, Constants.ADD)))) { + (parent != null && authorizeService.authorizeActionBoolean(context, parent, Constants.ADD)))) { throw new AuthorizeException( - "Only administrators can create communities"); + "Only administrators can create communities"); } - Community newCommunity = communityDAO.create(context, new Community()); + Community newCommunity; + if (uuid != null) { + newCommunity = communityDAO.create(context, new Community(uuid)); + } else { + newCommunity = communityDAO.create(context, new Community()); + } if (parent != null) { parent.addSubCommunity(newCommunity); @@ -128,9 +143,9 @@ public Community create(Community parent, Context context, String handle) throws getIdentifiers(context, newCommunity))); } - log.info(LogManager.getHeader(context, "create_community", - "community_id=" + newCommunity.getID()) - + ",handle=" + newCommunity.getHandle()); + log.info(LogHelper.getHeader(context, "create_community", + "community_id=" + newCommunity.getID()) + + ",handle=" + newCommunity.getHandle()); return newCommunity; } @@ -195,7 +210,7 @@ public void setMetadataSingleValue(Context context, Community community, * whitespace. 
*/ if (value == null) { - clearMetadata(context, community, field.SCHEMA, field.ELEMENT, field.QUALIFIER, Item.ANY); + clearMetadata(context, community, field.schema, field.element, field.qualifier, Item.ANY); community.setMetadataModified(); } else { super.setMetadataSingleValue(context, community, field, null, value); @@ -206,19 +221,19 @@ public void setMetadataSingleValue(Context context, Community community, @Override public Bitstream setLogo(Context context, Community community, InputStream is) - throws AuthorizeException, IOException, SQLException { + throws AuthorizeException, IOException, SQLException { // Check authorisation // authorized to remove the logo when DELETE rights // authorized when canEdit if (!((is == null) && authorizeService.authorizeActionBoolean( - context, community, Constants.DELETE))) { + context, community, Constants.DELETE))) { canEdit(context, community); } // First, delete any existing logo Bitstream oldLogo = community.getLogo(); if (oldLogo != null) { - log.info(LogManager.getHeader(context, "remove_logo", + log.info(LogHelper.getHeader(context, "remove_logo", "community_id=" + community.getID())); community.setLogo(null); bitstreamService.delete(context, oldLogo); @@ -231,10 +246,10 @@ public Bitstream setLogo(Context context, Community community, InputStream is) // now create policy for logo bitstream // to match our READ policy List policies = authorizeService - .getPoliciesActionFilter(context, community, Constants.READ); + .getPoliciesActionFilter(context, community, Constants.READ); authorizeService.addPolicies(context, policies, newLogo); - log.info(LogManager.getHeader(context, "set_logo", + log.info(LogHelper.getHeader(context, "set_logo", "community_id=" + community.getID() + "logo_bitstream_id=" + newLogo.getID())); } @@ -247,7 +262,7 @@ public void update(Context context, Community community) throws SQLException, Au // Check authorisation canEdit(context, community); - log.info(LogManager.getHeader(context, 
"update_community", + log.info(LogHelper.getHeader(context, "update_community", "community_id=" + community.getID())); super.update(context, community); @@ -287,6 +302,8 @@ public Group createAdministrators(Context context, Community community) throws S // register this as the admin group community.setAdmins(admins); + context.addEvent(new Event(Event.MODIFY, Constants.COMMUNITY, community.getID(), + null, getIdentifiers(context, community))); return admins; } @@ -302,6 +319,8 @@ public void removeAdministrators(Context context, Community community) throws SQ // Remove the link to the community table. community.setAdmins(null); + context.addEvent(new Event(Event.MODIFY, Constants.COMMUNITY, community.getID(), + null, getIdentifiers(context, community))); } @Override @@ -365,7 +384,7 @@ public void addCollection(Context context, Community community, Collection colle // Check authorisation authorizeService.authorizeAction(context, community, Constants.ADD); - log.info(LogManager.getHeader(context, "add_collection", + log.info(LogHelper.getHeader(context, "add_collection", "community_id=" + community.getID() + ",collection_id=" + collection.getID())); if (!community.getCollections().contains(collection)) { @@ -379,17 +398,26 @@ public void addCollection(Context context, Community community, Collection colle @Override public Community createSubcommunity(Context context, Community parentCommunity) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { return createSubcommunity(context, parentCommunity, null); } + @Override public Community createSubcommunity(Context context, Community parentCommunity, String handle) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { + return createSubcommunity(context, parentCommunity, handle, null); + } + + @Override + public Community createSubcommunity(Context context, Community parentCommunity, String handle, + UUID uuid) throws SQLException, AuthorizeException { 
// Check authorisation authorizeService.authorizeAction(context, parentCommunity, Constants.ADD); - Community c = create(parentCommunity, context, handle); + Community c; + c = create(parentCommunity, context, handle, uuid); + addSubcommunity(context, parentCommunity, c); return c; @@ -401,7 +429,7 @@ public void addSubcommunity(Context context, Community parentCommunity, Communit // Check authorisation authorizeService.authorizeAction(context, parentCommunity, Constants.ADD); - log.info(LogManager.getHeader(context, "add_subcommunity", + log.info(LogHelper.getHeader(context, "add_subcommunity", "parent_comm_id=" + parentCommunity.getID() + ",child_comm_id=" + childCommunity .getID())); @@ -431,7 +459,7 @@ public void removeCollection(Context context, Community community, Collection co collection.removeCommunity(community); } - log.info(LogManager.getHeader(context, "remove_collection", + log.info(LogHelper.getHeader(context, "remove_collection", "community_id=" + community.getID() + ",collection_id=" + collection.getID())); // Remove any mappings @@ -451,7 +479,7 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu rawDelete(context, childCommunity); - log.info(LogManager.getHeader(context, "remove_subcommunity", + log.info(LogHelper.getHeader(context, "remove_subcommunity", "parent_comm_id=" + parentCommunity.getID() + ",child_comm_id=" + childCommunity .getID())); @@ -519,12 +547,14 @@ public int getSupportsTypeConstant() { */ protected void rawDelete(Context context, Community community) throws SQLException, AuthorizeException, IOException { - log.info(LogManager.getHeader(context, "delete_community", + log.info(LogHelper.getHeader(context, "delete_community", "community_id=" + community.getID())); context.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, community.getID(), community.getHandle(), getIdentifiers(context, community))); + subscribeService.deleteByDspaceObject(context, community); + // Remove collections Iterator 
collections = community.getCollections().iterator(); @@ -664,10 +694,15 @@ public void updateLastModified(Context context, Community community) { @Override public Community findByIdOrLegacyId(Context context, String id) throws SQLException { - if (StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUID.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } @@ -680,4 +715,16 @@ public Community findByLegacyId(Context context, int id) throws SQLException { public int countTotal(Context context) throws SQLException { return communityDAO.countRows(context); } + + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ + @Override + public int countArchivedItems(Community community) throws ItemCountException { + return ItemCounter.getInstance().getCount(community); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/DCDate.java b/dspace-api/src/main/java/org/dspace/content/DCDate.java index 4acccb2d8444..d58aff7b1e22 100644 --- a/dspace-api/src/main/java/org/dspace/content/DCDate.java +++ b/dspace-api/src/main/java/org/dspace/content/DCDate.java @@ -34,12 +34,11 @@ * There are four levels of granularity, depending on how much date information * is available: year, month, day, time. *

    - * Examples: 1994-05-03T15:30:24,1995-10-04, - * 2001-10,1975 + * Examples: {@code 1994-05-03T15:30:24}, {@code 1995-10-04}, + * {@code 2001-10}, {@code 1975} * * @author Robert Tansley * @author Larry Stone - * @version $Revision$ */ public class DCDate { /** @@ -262,7 +261,7 @@ private synchronized Date tryParse(SimpleDateFormat sdf, String source) { * @return the year */ public int getYear() { - return (!withinGranularity(DateGran.YEAR)) ? -1 : localCalendar.get(Calendar.YEAR); + return !withinGranularity(DateGran.YEAR) ? -1 : localCalendar.get(Calendar.YEAR); } /** @@ -271,7 +270,7 @@ public int getYear() { * @return the month */ public int getMonth() { - return (!withinGranularity(DateGran.MONTH)) ? -1 : localCalendar.get(Calendar.MONTH) + 1; + return !withinGranularity(DateGran.MONTH) ? -1 : localCalendar.get(Calendar.MONTH) + 1; } /** @@ -280,7 +279,7 @@ public int getMonth() { * @return the day */ public int getDay() { - return (!withinGranularity(DateGran.DAY)) ? -1 : localCalendar.get(Calendar.DAY_OF_MONTH); + return !withinGranularity(DateGran.DAY) ? -1 : localCalendar.get(Calendar.DAY_OF_MONTH); } /** @@ -289,7 +288,7 @@ public int getDay() { * @return the hour */ public int getHour() { - return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.HOUR_OF_DAY); + return !withinGranularity(DateGran.TIME) ? -1 : localCalendar.get(Calendar.HOUR_OF_DAY); } /** @@ -298,7 +297,7 @@ public int getHour() { * @return the minute */ public int getMinute() { - return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.MINUTE); + return !withinGranularity(DateGran.TIME) ? -1 : localCalendar.get(Calendar.MINUTE); } /** @@ -307,7 +306,7 @@ public int getMinute() { * @return the second */ public int getSecond() { - return (!withinGranularity(DateGran.TIME)) ? -1 : localCalendar.get(Calendar.SECOND); + return !withinGranularity(DateGran.TIME) ? 
-1 : localCalendar.get(Calendar.SECOND); } /** @@ -316,7 +315,7 @@ public int getSecond() { * @return the year */ public int getYearUTC() { - return (!withinGranularity(DateGran.YEAR)) ? -1 : calendar.get(Calendar.YEAR); + return !withinGranularity(DateGran.YEAR) ? -1 : calendar.get(Calendar.YEAR); } /** @@ -325,7 +324,7 @@ public int getYearUTC() { * @return the month */ public int getMonthUTC() { - return (!withinGranularity(DateGran.MONTH)) ? -1 : calendar.get(Calendar.MONTH) + 1; + return !withinGranularity(DateGran.MONTH) ? -1 : calendar.get(Calendar.MONTH) + 1; } /** @@ -334,7 +333,7 @@ public int getMonthUTC() { * @return the day */ public int getDayUTC() { - return (!withinGranularity(DateGran.DAY)) ? -1 : calendar.get(Calendar.DAY_OF_MONTH); + return !withinGranularity(DateGran.DAY) ? -1 : calendar.get(Calendar.DAY_OF_MONTH); } /** @@ -343,7 +342,7 @@ public int getDayUTC() { * @return the hour */ public int getHourUTC() { - return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.HOUR_OF_DAY); + return !withinGranularity(DateGran.TIME) ? -1 : calendar.get(Calendar.HOUR_OF_DAY); } /** @@ -352,7 +351,7 @@ public int getHourUTC() { * @return the minute */ public int getMinuteUTC() { - return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.MINUTE); + return !withinGranularity(DateGran.TIME) ? -1 : calendar.get(Calendar.MINUTE); } /** @@ -361,15 +360,15 @@ public int getMinuteUTC() { * @return the second */ public int getSecondUTC() { - return (!withinGranularity(DateGran.TIME)) ? -1 : calendar.get(Calendar.SECOND); + return !withinGranularity(DateGran.TIME) ? -1 : calendar.get(Calendar.SECOND); } - /** * Get the date as a string to put back in the Dublin Core. Use the UTC/GMT calendar version. * * @return The date as a string. 
*/ + @Override public String toString() { if (calendar == null) { return "null"; diff --git a/dspace-api/src/main/java/org/dspace/content/DCPersonName.java b/dspace-api/src/main/java/org/dspace/content/DCPersonName.java index cdcff55c37bc..cb9b5346ff69 100644 --- a/dspace-api/src/main/java/org/dspace/content/DCPersonName.java +++ b/dspace-api/src/main/java/org/dspace/content/DCPersonName.java @@ -18,7 +18,6 @@ * FIXME: No policy for dealing with "van"/"van der" and "Jr." * * @author Robert Tansley - * @version $Revision$ */ public class DCPersonName { /** @@ -89,8 +88,9 @@ public DCPersonName(String lastNameIn, String firstNamesIn) { * * @return the name, suitable for putting in the database */ + @Override public String toString() { - StringBuffer out = new StringBuffer(); + StringBuilder out = new StringBuilder(); if (lastName != null) { out.append(lastName); diff --git a/dspace-api/src/main/java/org/dspace/content/DCSeriesNumber.java b/dspace-api/src/main/java/org/dspace/content/DCSeriesNumber.java index bec81494be5f..37b9fb7d7d75 100644 --- a/dspace-api/src/main/java/org/dspace/content/DCSeriesNumber.java +++ b/dspace-api/src/main/java/org/dspace/content/DCSeriesNumber.java @@ -8,10 +8,9 @@ package org.dspace.content; /** - * Series and report number, as stored in relation.ispartofseries + * Series and report number, as stored in {@code relation.ispartofseries}. 
* * @author Robert Tansley - * @version $Id$ */ public class DCSeriesNumber { /** @@ -70,6 +69,7 @@ public DCSeriesNumber(String s, String n) { * * @return the series and number as they should be stored in the DB */ + @Override public String toString() { if (series == null) { return (null); diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java index b740a6b82d5a..59217a109f66 100644 --- a/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObject.java @@ -38,8 +38,8 @@ @Table(name = "dspaceobject") public abstract class DSpaceObject implements Serializable, ReloadableEntity { @Id - @GeneratedValue(generator = "system-uuid") - @GenericGenerator(name = "system-uuid", strategy = "uuid2") + @GeneratedValue(generator = "predefined-uuid") + @GenericGenerator(name = "predefined-uuid", strategy = "org.dspace.content.PredefinedUUIDGenerator") @Column(name = "uuid", unique = true, nullable = false, insertable = true, updatable = false) protected java.util.UUID id; @@ -48,6 +48,12 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity metadata = new ArrayList<>(); @@ -61,7 +67,7 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity handles = new ArrayList<>(); @OneToMany(fetch = FetchType.LAZY, mappedBy = "dSpaceObject", cascade = CascadeType.ALL) - private List resourcePolicies = new ArrayList<>(); + private final List resourcePolicies = new ArrayList<>(); /** * True if anything else was changed since last update() @@ -76,6 +82,15 @@ public abstract class DSpaceObject implements Serializable, ReloadableEntity handle) { diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java index c34291c3dd99..2119959073f0 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java @@ -13,11 +13,11 @@ import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.function.Supplier; +import java.util.stream.Collectors; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang.NotImplementedException; @@ -95,20 +95,6 @@ public ArrayList getIdentifiers(Context context, T dso) { } } - if (log.isDebugEnabled()) { - StringBuilder dbgMsg = new StringBuilder(); - for (String id : identifiers) { - if (dbgMsg.capacity() == 0) { - dbgMsg.append("This DSO's Identifiers are: "); - } else { - dbgMsg.append(", "); - } - dbgMsg.append(id); - } - dbgMsg.append("."); - log.debug(dbgMsg.toString()); - } - return identifiers; } @@ -140,6 +126,11 @@ public List getMetadata(T dso, String schema, String element, Str } } + // Sort the metadataValues if they have been modified, + // is used to preserve the default order. + if (dso.isMetadataModified()) { + values.sort(MetadataValueComparators.defaultComparator); + } // Create an array of matching values return values; } @@ -257,67 +248,64 @@ public List addMetadata(Context context, T dso, MetadataField met boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField); boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField); - List newMetadata = new ArrayList<>(values.size()); + List newMetadata = new ArrayList<>(); // We will not verify that they are valid entries in the registry // until update() is called. 
for (int i = 0; i < values.size(); i++) { - - if (authorities != null && authorities.size() >= i) { - if (StringUtils.startsWith(authorities.get(i), Constants.VIRTUAL_AUTHORITY_PREFIX)) { - continue; - } - } - MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField); - newMetadata.add(metadataValue); - - metadataValue.setPlace(placeSupplier.get()); - - metadataValue.setLanguage(lang == null ? null : lang.trim()); - - // Logic to set Authority and Confidence: - // - normalize an empty string for authority to NULL. - // - if authority key is present, use given confidence or NOVALUE if not given - // - otherwise, preserve confidence if meaningful value was given since it may document a failed - // authority lookup - // - CF_UNSET signifies no authority nor meaningful confidence. - // - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key - if (authorityControlled) { - if (authorities != null && authorities.get(i) != null && authorities.get(i).length() > 0) { - metadataValue.setAuthority(authorities.get(i)); - metadataValue.setConfidence(confidences == null ? Choices.CF_NOVALUE : confidences.get(i)); - } else { - metadataValue.setAuthority(null); - metadataValue.setConfidence(confidences == null ? Choices.CF_UNSET : confidences.get(i)); + if (values.get(i) != null) { + if (authorities != null && authorities.size() >= i) { + if (StringUtils.startsWith(authorities.get(i), Constants.VIRTUAL_AUTHORITY_PREFIX)) { + continue; + } } - // authority sanity check: if authority is required, was it supplied? - // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so - // use a runtime exception - if (authorityRequired && (metadataValue.getAuthority() == null || metadataValue.getAuthority() - .length() == 0)) { - throw new IllegalArgumentException("The metadata field \"" + metadataField - .toString() + "\" requires an authority key but none was provided. 
Value=\"" + values - .get(i) + "\""); + MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField); + newMetadata.add(metadataValue); + + metadataValue.setPlace(placeSupplier.get()); + + metadataValue.setLanguage(lang == null ? null : lang.trim()); + + // Logic to set Authority and Confidence: + // - normalize an empty string for authority to NULL. + // - if authority key is present, use given confidence or NOVALUE if not given + // - otherwise, preserve confidence if meaningful value was given since it may document a failed + // authority lookup + // - CF_UNSET signifies no authority nor meaningful confidence. + // - it's possible to have empty authority & CF_ACCEPTED if e.g. user deletes authority key + if (authorityControlled) { + if (authorities != null && authorities.get(i) != null && authorities.get(i).length() > 0) { + metadataValue.setAuthority(authorities.get(i)); + metadataValue.setConfidence(confidences == null ? Choices.CF_NOVALUE : confidences.get(i)); + } else { + metadataValue.setAuthority(null); + metadataValue.setConfidence(confidences == null ? Choices.CF_UNSET : confidences.get(i)); + } + // authority sanity check: if authority is required, was it supplied? + // XXX FIXME? can't throw a "real" exception here without changing all the callers to expect it, so + // use a runtime exception + if (authorityRequired && (metadataValue.getAuthority() == null || metadataValue.getAuthority() + .length() == 0)) { + throw new IllegalArgumentException("The metadata field \"" + metadataField + .toString() + "\" requires an authority key but none was provided. 
Value=\"" + values + .get(i) + "\""); + } } - } - if (values.get(i) != null) { // remove control unicode char String temp = values.get(i).trim(); char[] dcvalue = temp.toCharArray(); for (int charPos = 0; charPos < dcvalue.length; charPos++) { if (Character.isISOControl(dcvalue[charPos]) && - !String.valueOf(dcvalue[charPos]).equals("\u0009") && - !String.valueOf(dcvalue[charPos]).equals("\n") && - !String.valueOf(dcvalue[charPos]).equals("\r")) { + !String.valueOf(dcvalue[charPos]).equals("\u0009") && + !String.valueOf(dcvalue[charPos]).equals("\n") && + !String.valueOf(dcvalue[charPos]).equals("\r")) { dcvalue[charPos] = ' '; } } metadataValue.setValue(String.valueOf(dcvalue)); - } else { - metadataValue.setValue(null); - } - //An update here isn't needed, this is persited upon the merge of the owning object + //An update here isn't needed, this is persited upon the merge of the owning object // metadataValueService.update(context, metadataValue); - dso.addDetails(metadataField.toString()); + dso.addDetails(metadataField.toString()); + } } setMetadataModified(dso); return newMetadata; @@ -435,7 +423,7 @@ public String getMetadataFirstValue(T dso, String schema, String element, String @Override public String getMetadataFirstValue(T dso, MetadataFieldName field, String language) { List metadataValues - = getMetadata(dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER, language); + = getMetadata(dso, field.schema, field.element, field.qualifier, language); if (CollectionUtils.isNotEmpty(metadataValues)) { return metadataValues.get(0).getValue(); } @@ -462,11 +450,11 @@ public void setMetadataSingleValue(Context context, T dso, MetadataFieldName fie String language, String value) throws SQLException { if (value != null) { - clearMetadata(context, dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER, + clearMetadata(context, dso, field.schema, field.element, field.qualifier, language); - String newValueLanguage = (Item.ANY.equals(language)) ? 
null : language; - addMetadata(context, dso, field.SCHEMA, field.ELEMENT, field.QUALIFIER, + String newValueLanguage = Item.ANY.equals(language) ? null : language; + addMetadata(context, dso, field.schema, field.element, field.qualifier, newValueLanguage, value); dso.setMetadataModified(); } @@ -559,7 +547,7 @@ protected String[] getElements(String fieldName) { int add = 4 - tokens.length; if (add > 0) { - tokens = (String[]) ArrayUtils.addAll(tokens, new String[add]); + tokens = ArrayUtils.addAll(tokens, new String[add]); } return tokens; @@ -610,7 +598,7 @@ public void update(Context context, T dso) throws SQLException, AuthorizeExcepti */ // A map created to store the latest place for each metadata field Map fieldToLastPlace = new HashMap<>(); - List metadataValues = new LinkedList<>(); + List metadataValues; if (dso.getType() == Constants.ITEM) { metadataValues = getMetadata(dso, Item.ANY, Item.ANY, Item.ANY, Item.ANY); } else { @@ -620,38 +608,51 @@ public void update(Context context, T dso) throws SQLException, AuthorizeExcepti //If two places are the same then the MetadataValue instance will be placed before the //RelationshipMetadataValue instance. //This is done to ensure that the order is correct. 
- metadataValues.sort(new Comparator() { - @Override - public int compare(MetadataValue o1, MetadataValue o2) { - int compare = o1.getPlace() - o2.getPlace(); - if (compare == 0) { - if (o1 instanceof RelationshipMetadataValue) { - return 1; - } else if (o2 instanceof RelationshipMetadataValue) { - return -1; - } + metadataValues.sort((o1, o2) -> { + int compare = o1.getPlace() - o2.getPlace(); + if (compare == 0) { + if (o1 instanceof RelationshipMetadataValue && o2 instanceof RelationshipMetadataValue) { + return compare; + } else if (o1 instanceof RelationshipMetadataValue) { + return 1; + } else if (o2 instanceof RelationshipMetadataValue) { + return -1; } - return compare; } + return compare; }); for (MetadataValue metadataValue : metadataValues) { //Retrieve & store the place for each metadata value - if (StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) && - ((RelationshipMetadataValue) metadataValue).isUseForPlace()) { + if ( + // For virtual MDVs with useForPlace=true, + // update both the place of the metadatum and the place of the Relationship. + // E.g. for an Author relationship, + // the place should be updated using the same principle as dc.contributor.author. 
+ StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) + && ((RelationshipMetadataValue) metadataValue).isUseForPlace() + ) { int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue); metadataValue.setPlace(mvPlace); String authority = metadataValue.getAuthority(); String relationshipId = StringUtils.split(authority, "::")[1]; Relationship relationship = relationshipService.find(context, Integer.parseInt(relationshipId)); - if (relationship.getLeftItem() == (Item) dso) { + if (relationship.getLeftItem().equals(dso)) { relationship.setLeftPlace(mvPlace); } else { relationship.setRightPlace(mvPlace); } relationshipService.update(context, relationship); - } else if (!StringUtils.startsWith(metadataValue.getAuthority(), - Constants.VIRTUAL_AUTHORITY_PREFIX)) { + } else if ( + // Otherwise, just set the place of the metadatum + // ...unless the metadatum in question is a relation.* metadatum. + // This case is a leftover from when a Relationship is removed and copied to metadata. + // If we let its place change the order of any remaining Relationships will be affected. 
+ // todo: this makes it so these leftover MDVs can't be reordered later on + !StringUtils.equals( + metadataValue.getMetadataField().getMetadataSchema().getName(), "relation" + ) + ) { int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue); metadataValue.setPlace(mvPlace); } @@ -742,12 +743,15 @@ public void addAndShiftRightMetadata(Context context, T dso, String schema, Stri @Override public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to) throws SQLException, IllegalArgumentException { - if (from == to) { throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location"); } - List list = getMetadata(dso, schema, element, qualifier); + List list = + getMetadata(dso, schema, element, qualifier).stream() + .sorted(Comparator.comparing(MetadataValue::getPlace)) + .collect(Collectors.toList()); + if (from >= list.size() || to >= list.size() || to < 0 || from < 0) { throw new IllegalArgumentException( diff --git a/dspace-api/src/main/java/org/dspace/content/EntityType.java b/dspace-api/src/main/java/org/dspace/content/EntityType.java index d44ec5a35dca..20ab758a0b76 100644 --- a/dspace-api/src/main/java/org/dspace/content/EntityType.java +++ b/dspace-api/src/main/java/org/dspace/content/EntityType.java @@ -78,6 +78,7 @@ public void setLabel(String label) { * * @return The ID for this EntityType */ + @Override public Integer getID() { return id; } @@ -87,6 +88,7 @@ public Integer getID() { * @param obj object to be compared * @return */ + @Override public boolean equals(Object obj) { if (!(obj instanceof EntityType)) { return false; @@ -97,10 +99,7 @@ public boolean equals(Object obj) { return false; } - if (!StringUtils.equals(this.getLabel(), entityType.getLabel())) { - return false; - } - return true; + return StringUtils.equals(this.getLabel(), entityType.getLabel()); } /** diff --git a/dspace-api/src/main/java/org/dspace/content/EntityTypeServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/content/EntityTypeServiceImpl.java index 4577054ff063..0e0c6d51e501 100644 --- a/dspace-api/src/main/java/org/dspace/content/EntityTypeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/EntityTypeServiceImpl.java @@ -7,16 +7,31 @@ */ package org.dspace.content; +import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; +import java.util.Set; import org.apache.commons.collections.CollectionUtils; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.response.FacetField; +import org.apache.solr.client.solrj.response.FacetField.Count; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.params.FacetParams; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.dao.EntityTypeDAO; import org.dspace.content.service.EntityTypeService; +import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; import org.springframework.beans.factory.annotation.Autowired; public class EntityTypeServiceImpl implements EntityTypeService { @@ -27,6 +42,12 @@ public class EntityTypeServiceImpl implements EntityTypeService { @Autowired(required = true) protected AuthorizeService authorizeService; + @Autowired + protected GroupService groupService; + + @Autowired + protected SolrSearchCore solrSearchCore; + @Override public EntityType findByEntityType(Context context, String entityType) throws SQLException { return entityTypeDAO.findByEntityType(context, entityType); @@ -98,4 +119,63 @@ public void delete(Context context,EntityType entityType) 
throws SQLException, A } entityTypeDAO.delete(context, entityType); } + + @Override + public List getSubmitAuthorizedTypes(Context context) + throws SQLException, SolrServerException, IOException { + List types = new ArrayList<>(); + StringBuilder query = new StringBuilder(); + org.dspace.eperson.EPerson currentUser = context.getCurrentUser(); + if (!authorizeService.isAdmin(context)) { + String userId = ""; + if (currentUser != null) { + userId = currentUser.getID().toString(); + } + query.append("submit:(e").append(userId); + Set groups = groupService.allMemberGroupsSet(context, currentUser); + for (Group group : groups) { + query.append(" OR g").append(group.getID()); + } + query.append(")"); + } else { + query.append("*:*"); + } + + SolrQuery sQuery = new SolrQuery(query.toString()); + sQuery.addFilterQuery("search.resourcetype:" + IndexableCollection.TYPE); + sQuery.setRows(0); + sQuery.addFacetField("search.entitytype"); + sQuery.setFacetMinCount(1); + sQuery.setFacetLimit(Integer.MAX_VALUE); + sQuery.setFacetSort(FacetParams.FACET_SORT_INDEX); + QueryResponse qResp = solrSearchCore.getSolr().query(sQuery); + FacetField facetField = qResp.getFacetField("search.entitytype"); + if (Objects.nonNull(facetField)) { + for (Count c : facetField.getValues()) { + types.add(c.getName()); + } + } + return types; + } + + @Override + public List getEntityTypesByNames(Context context, List names, Integer limit, Integer offset) + throws SQLException { + return entityTypeDAO.getEntityTypesByNames(context, names, limit, offset); + } + + @Override + public int countEntityTypesByNames(Context context, List names) throws SQLException { + return entityTypeDAO.countEntityTypesByNames(context, names); + } + + @Override + public void initDefaultEntityTypeNames(Context context) throws SQLException, AuthorizeException { + EntityType noneEntityType = this.findByEntityType(context, Constants.ENTITY_TYPE_NONE); + if (Objects.isNull(noneEntityType)) { + noneEntityType = 
this.create(context, Constants.ENTITY_TYPE_NONE); + this.update(context, noneEntityType); + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/FeedbackServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/FeedbackServiceImpl.java new file mode 100644 index 000000000000..7e34af132b0a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/FeedbackServiceImpl.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; +import java.io.IOException; +import java.util.Date; +import java.util.Objects; +import javax.mail.MessagingException; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.dspace.content.service.FeedbackService; +import org.dspace.core.Context; +import org.dspace.core.Email; +import org.dspace.core.I18nUtil; + +/** + * Implementation of {@link FeedbackService} interface. + * It is responsible for sendint a feedback email with content a DSpace user + * fills from feedback section of DSpace. 
+ */ +public class FeedbackServiceImpl implements FeedbackService { + + @Override + public void sendEmail(Context context, HttpServletRequest request, String recipientEmail, String senderEmail, + String message, String page) throws IOException, MessagingException { + String session = request.getHeader("x-correlation-id"); + String agent = request.getHeader("User-Agent"); + String currentUserEmail = StringUtils.EMPTY; + + if (Objects.nonNull(context.getCurrentUser())) { + currentUserEmail = context.getCurrentUser().getEmail(); + } + Email email = Email.getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "feedback")); + email.addRecipient(recipientEmail); + email.addArgument(new Date()); // Date + email.addArgument(senderEmail); // Email + email.addArgument(currentUserEmail); // Logged in as + email.addArgument(page); // Referring page + email.addArgument(agent); // User agent + email.addArgument(session); // Session ID + email.addArgument(message); // The feedback itself + email.send(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/InProgressSubmission.java b/dspace-api/src/main/java/org/dspace/content/InProgressSubmission.java index 5e7a04c4c9a3..42ef449c7d89 100644 --- a/dspace-api/src/main/java/org/dspace/content/InProgressSubmission.java +++ b/dspace-api/src/main/java/org/dspace/content/InProgressSubmission.java @@ -17,7 +17,6 @@ * which stage of submission they are (in workspace or workflow system) * * @author Robert Tansley - * @version $Revision$ */ public interface InProgressSubmission extends ReloadableEntity { /** @@ -25,6 +24,7 @@ public interface InProgressSubmission extends ReloadableEntity { * * @return the internal identifier */ + @Override Integer getID(); /** diff --git a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java index 11cd4c107c34..1aadbea162a5 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/InstallItemServiceImpl.java @@ -10,9 +10,14 @@ import java.io.IOException; import java.sql.SQLException; import java.util.List; +import java.util.Map; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.logic.Filter; +import org.dspace.content.logic.FilterUtils; import org.dspace.content.service.CollectionService; import org.dspace.content.service.InstallItemService; import org.dspace.content.service.ItemService; @@ -20,8 +25,11 @@ import org.dspace.core.Context; import org.dspace.embargo.service.EmbargoService; import org.dspace.event.Event; +import org.dspace.identifier.Identifier; import org.dspace.identifier.IdentifierException; import org.dspace.identifier.service.IdentifierService; +import org.dspace.supervision.SupervisionOrder; +import org.dspace.supervision.service.SupervisionOrderService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -42,9 +50,13 @@ public class InstallItemServiceImpl implements InstallItemService { protected IdentifierService identifierService; @Autowired(required = true) protected ItemService itemService; + @Autowired(required = true) + protected SupervisionOrderService supervisionOrderService; + @Autowired(required = false) - protected InstallItemServiceImpl() { + Logger log = LogManager.getLogger(InstallItemServiceImpl.class); + protected InstallItemServiceImpl() { } @Override @@ -59,10 +71,14 @@ public Item installItem(Context c, InProgressSubmission is, AuthorizeException { Item item = is.getItem(); Collection collection = is.getCollection(); + // Get map of filters to use for identifier types. 
+ Map, Filter> filters = FilterUtils.getIdentifierFilters(false); try { if (suppliedHandle == null) { - identifierService.register(c, item); + // Register with the filters we've set up + identifierService.register(c, item, filters); } else { + // This will register the handle but a pending DOI won't be compatible and so won't be registered identifierService.register(c, item, suppliedHandle); } } catch (IdentifierException e) { @@ -77,7 +93,7 @@ public Item installItem(Context c, InProgressSubmission is, // As this is a BRAND NEW item, as a final step we need to remove the // submitter item policies created during deposit and replace them with // the default policies from the collection. - itemService.inheritCollectionDefaultPolicies(c, item, collection); + itemService.inheritCollectionDefaultPolicies(c, item, collection, false); return item; } @@ -222,9 +238,19 @@ protected Item finishItem(Context c, Item item, InProgressSubmission is) // set embargo lift date and take away read access if indicated. 
embargoService.setEmbargo(c, item); + // delete all related supervision orders + deleteSupervisionOrders(c, item); + return item; } + private void deleteSupervisionOrders(Context c, Item item) throws SQLException, AuthorizeException { + List supervisionOrders = supervisionOrderService.findByItem(c, item); + for (SupervisionOrder supervisionOrder : supervisionOrders) { + supervisionOrderService.delete(c, supervisionOrder); + } + } + @Override public String getBitstreamProvenanceMessage(Context context, Item myitem) throws SQLException { @@ -245,4 +271,28 @@ public String getBitstreamProvenanceMessage(Context context, Item myitem) return myMessage.toString(); } + + @Override + public String getSubmittedByProvenanceMessage(Context context, Item item) throws SQLException { + // get date + DCDate now = DCDate.getCurrent(); + + // Create provenance description + StringBuffer provmessage = new StringBuffer(); + + if (item.getSubmitter() != null) { + provmessage.append("Submitted by ").append(item.getSubmitter().getFullName()) + .append(" (").append(item.getSubmitter().getEmail()).append(") on ") + .append(now.toString()); + } else { + // else, null submitter + provmessage.append("Submitted by unknown (probably automated) on") + .append(now.toString()); + } + provmessage.append("\n"); + + // add sizes and checksums of bitstreams + provmessage.append(getBitstreamProvenanceMessage(context, item)); + return provmessage.toString(); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/Item.java b/dspace-api/src/main/java/org/dspace/content/Item.java index ef1f378b641c..547ff490b84b 100644 --- a/dspace-api/src/main/java/org/dspace/content/Item.java +++ b/dspace-api/src/main/java/org/dspace/content/Item.java @@ -13,6 +13,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; +import java.util.UUID; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; @@ -27,8 +28,6 @@ import 
javax.persistence.TemporalType; import javax.persistence.Transient; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.dspace.content.comparator.NameAscendingComparator; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; @@ -49,17 +48,10 @@ * * @author Robert Tansley * @author Martin Hald - * @version $Revision$ */ @Entity @Table(name = "item") public class Item extends DSpaceObject implements DSpaceObjectLegacySupport { - - /** - * log4j logger - */ - private static final Logger log = LogManager.getLogger(); - /** * Wild card for Dublin Core metadata qualifiers/languages */ @@ -131,6 +123,16 @@ protected Item() { } + /** + * Takes a pre-determined UUID to be passed to the object to allow for the + * restoration of previously defined UUID's. + * + * @param uuid Takes a uuid to be passed to the Pre-Defined UUID Generator + */ + protected Item(UUID uuid) { + this.predefinedUUID = uuid; + } + /** * Find out if the item is part of the main archive * @@ -297,7 +299,7 @@ public List getBundles() { * @return the bundles in an unordered array */ public List getBundles(String name) { - List matchingBundles = new ArrayList(); + List matchingBundles = new ArrayList<>(); // now only keep bundles with matching names List bunds = getBundles(); for (Bundle bundle : bunds) { @@ -328,7 +330,7 @@ void removeBundle(Bundle bundle) { /** * Return true if other is the same Item as - * this object, false otherwise + * this object, false otherwise. 
* * @param obj object to compare to * @return true if object passed in represents the same item @@ -336,7 +338,7 @@ void removeBundle(Bundle bundle) { */ @Override public boolean equals(Object obj) { - if (obj == null) { + if (!(obj instanceof Item)) { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); @@ -344,10 +346,7 @@ public boolean equals(Object obj) { return false; } final Item otherItem = (Item) obj; - if (!this.getID().equals(otherItem.getID())) { - return false; - } - return true; + return this.getID().equals(otherItem.getID()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index 59beec72a616..9791f69abbc5 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -12,11 +12,12 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Comparator; import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.UUID; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -25,6 +26,8 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.dspace.app.requestitem.RequestItem; +import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.app.util.AuthorizeUtil; import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeException; @@ -39,6 +42,7 @@ import org.dspace.content.service.BundleService; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; +import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.InstallItemService; 
import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataSchemaService; @@ -47,14 +51,33 @@ import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.qaevent.dao.QAEventsDAO; import org.dspace.services.ConfigurationService; import org.dspace.versioning.service.VersioningService; import org.dspace.workflow.WorkflowItemService; @@ -81,6 +104,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected CommunityService communityService; @Autowired(required = true) + protected GroupService groupService; + @Autowired(required = true) protected AuthorizeService authorizeService; 
@Autowired(required = true) protected BundleService bundleService; @@ -93,12 +118,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected InstallItemService installItemService; @Autowired(required = true) + protected SearchService searchService; + @Autowired(required = true) protected ResourcePolicyService resourcePolicyService; @Autowired(required = true) protected CollectionService collectionService; @Autowired(required = true) protected IdentifierService identifierService; @Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -119,6 +148,32 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) private RelationshipMetadataService relationshipMetadataService; + @Autowired(required = true) + private EntityTypeService entityTypeService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Autowired(required = true) + private OrcidHistoryService orcidHistoryService; + + @Autowired(required = true) + private OrcidQueueService orcidQueueService; + + @Autowired(required = true) + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired(required = true) + private ResearcherProfileService researcherProfileService; + @Autowired(required = true) + private RequestItemService requestItemService; + + @Autowired(required = true) + protected SubscribeService subscribeService; + + @Autowired + private QAEventsDAO qaEventsDao; + protected ItemServiceImpl() { super(); } @@ -159,7 +214,7 @@ public Item find(Context context, UUID id) throws SQLException { Item item = itemDAO.findByID(context, Item.class, id); if (item == null) { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_item", + log.debug(LogHelper.getHeader(context, "find_item", 
"not_found,item_id=" + id)); } return null; @@ -167,7 +222,7 @@ public Item find(Context context, UUID id) throws SQLException { // not null, return item if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_item", "item_id=" + log.debug(LogHelper.getHeader(context, "find_item", "item_id=" + id)); } @@ -176,16 +231,29 @@ public Item find(Context context, UUID id) throws SQLException { @Override public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException { + return create(context, workspaceItem, null); + } + + @Override + public Item create(Context context, WorkspaceItem workspaceItem, + UUID uuid) throws SQLException, AuthorizeException { + Collection collection = workspaceItem.getCollection(); + authorizeService.authorizeAction(context, collection, Constants.ADD); if (workspaceItem.getItem() != null) { throw new IllegalArgumentException( - "Attempting to create an item for a workspace item that already contains an item"); + "Attempting to create an item for a workspace item that already contains an item"); + } + Item item = null; + if (uuid != null) { + item = createItem(context, uuid); + } else { + item = createItem(context); } - Item item = createItem(context); workspaceItem.setItem(item); - log.info(LogManager.getHeader(context, "create_item", "item_id=" - + item.getID())); + log.info(LogHelper.getHeader(context, "create_item", "item_id=" + + item.getID())); return item; } @@ -202,7 +270,7 @@ public Item createTemplateItem(Context context, Collection collection) throws SQ collection.setTemplateItem(template); template.setTemplateItemOf(collection); - log.info(LogManager.getHeader(context, "create_template_item", + log.info(LogHelper.getHeader(context, "create_template_item", "collection_id=" + collection.getID() + ",template_item_id=" + template.getID())); @@ -227,6 +295,11 @@ public Iterator findAllUnfiltered(Context context) throws SQLException { return itemDAO.findAll(context, true, true); } 
+ @Override + public Iterator findAllRegularItems(Context context) throws SQLException { + return itemDAO.findAllRegularItems(context); + } + @Override public Iterator findBySubmitter(Context context, EPerson eperson) throws SQLException { return itemDAO.findBySubmitter(context, eperson); @@ -340,7 +413,7 @@ public void addBundle(Context context, Item item, Bundle bundle) throws SQLExcep // Check authorisation authorizeService.authorizeAction(context, item, Constants.ADD); - log.info(LogManager.getHeader(context, "add_bundle", "item_id=" + log.info(LogHelper.getHeader(context, "add_bundle", "item_id=" + item.getID() + ",bundle_id=" + bundle.getID())); // Check it's not already there @@ -368,7 +441,7 @@ public void removeBundle(Context context, Item item, Bundle bundle) // Check authorisation authorizeService.authorizeAction(context, item, Constants.REMOVE); - log.info(LogManager.getHeader(context, "remove_bundle", "item_id=" + log.info(LogHelper.getHeader(context, "remove_bundle", "item_id=" + item.getID() + ",bundle_id=" + bundle.getID())); context.addEvent(new Event(Event.REMOVE, Constants.ITEM, item.getID(), @@ -418,6 +491,30 @@ public List getNonInternalBitstreams(Context context, Item item) thro return bitstreamList; } + protected Item createItem(Context context, UUID uuid) throws SQLException, AuthorizeException { + Item item; + if (uuid != null) { + item = itemDAO.create(context, new Item(uuid)); + } else { + item = itemDAO.create(context, new Item()); + } + // set discoverable to true (default) + item.setDiscoverable(true); + + // Call update to give the item a last modified date. OK this isn't + // amazingly efficient but creates don't happen that often. 
+ context.turnOffAuthorisationSystem(); + update(context, item); + context.restoreAuthSystemState(); + + context.addEvent(new Event(Event.CREATE, Constants.ITEM, item.getID(), + null, getIdentifiers(context, item))); + + log.info(LogHelper.getHeader(context, "create_item", "item_id=" + item.getID())); + + return item; + } + protected Item createItem(Context context) throws SQLException, AuthorizeException { Item item = itemDAO.create(context, new Item()); // set discoverable to true (default) @@ -432,7 +529,7 @@ protected Item createItem(Context context) throws SQLException, AuthorizeExcepti context.addEvent(new Event(Event.CREATE, Constants.ITEM, item.getID(), null, getIdentifiers(context, item))); - log.info(LogManager.getHeader(context, "create_item", "item_id=" + item.getID())); + log.info(LogHelper.getHeader(context, "create_item", "item_id=" + item.getID())); return item; } @@ -490,7 +587,7 @@ public void update(Context context, Item item) throws SQLException, AuthorizeExc authorizeService.authorizeAction(context, item, Constants.WRITE); } - log.info(LogManager.getHeader(context, "update_item", "item_id=" + log.info(LogHelper.getHeader(context, "update_item", "item_id=" + item.getID())); super.update(context, item); @@ -595,7 +692,7 @@ public void withdraw(Context context, Item item) throws SQLException, AuthorizeE } // Write log - log.info(LogManager.getHeader(context, "withdraw_item", "user=" + log.info(LogHelper.getHeader(context, "withdraw_item", "user=" + e.getEmail() + ",item_id=" + item.getID())); } @@ -661,7 +758,7 @@ public void reinstate(Context context, Item item) throws SQLException, Authorize } // Write log - log.info(LogManager.getHeader(context, "reinstate_item", "user=" + log.info(LogHelper.getHeader(context, "reinstate_item", "user=" + e.getEmail() + ",item_id=" + item.getID())); } @@ -682,11 +779,12 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, context.addEvent(new Event(Event.DELETE, Constants.ITEM, 
item.getID(), item.getHandle(), getIdentifiers(context, item))); - log.info(LogManager.getHeader(context, "delete_item", "item_id=" + log.info(LogHelper.getHeader(context, "delete_item", "item_id=" + item.getID())); - + //remove subscription related with it + subscribeService.deleteByDspaceObject(context, item); // Remove relationships - for (Relationship relationship : relationshipService.findByItem(context, item)) { + for (Relationship relationship : relationshipService.findByItem(context, item, -1, -1, false, false)) { relationshipService.forceDelete(context, relationship, false, false); } @@ -696,9 +794,23 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. + // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. + DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); + removeRequest(context, item); + + removeOrcidSynchronizationStuff(context, item); + // Also delete the item if it appears in a harvested collection. 
HarvestedItem hi = harvestedItemService.find(context, item); @@ -706,6 +818,16 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, harvestedItemService.delete(context, hi); } + OrcidToken orcidToken = orcidTokenService.findByProfileItem(context, item); + if (orcidToken != null) { + orcidToken.setProfileItem(null); + } + + List qaEvents = qaEventsDao.findByItem(context, item); + for (QAEventProcessed qaEvent : qaEvents) { + qaEventsDao.delete(context, qaEvent); + } + //Only clear collections after we have removed everything else from the item item.clearCollections(); item.setOwningCollection(null); @@ -714,6 +836,14 @@ protected void rawDelete(Context context, Item item) throws AuthorizeException, itemDAO.delete(context, item); } + protected void removeRequest(Context context, Item item) throws SQLException { + Iterator requestItems = requestItemService.findByItem(context, item); + while (requestItems.hasNext()) { + RequestItem requestItem = requestItems.next(); + requestItemService.delete(context, requestItem); + } + } + @Override public void removeAllBundles(Context context, Item item) throws AuthorizeException, SQLException, IOException { Iterator bundles = item.getBundles().iterator(); @@ -731,7 +861,7 @@ protected void deleteBundle(Context context, Item item, Bundle b) bundleService.delete(context, b); - log.info(LogManager.getHeader(context, "remove_bundle", "item_id=" + log.info(LogHelper.getHeader(context, "remove_bundle", "item_id=" + item.getID() + ",bundle_id=" + b.getID())); context .addEvent(new Event(Event.REMOVE, Constants.ITEM, item.getID(), Constants.BUNDLE, b.getID(), b.getName())); @@ -799,51 +929,136 @@ public void removeGroupPolicies(Context context, Item item, Group group) throws @Override public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { - adjustItemPolicies(context, item, collection); - adjustBundleBitstreamPolicies(context, 
item, collection); + inheritCollectionDefaultPolicies(context, item, collection, true); + } + + @Override + public void inheritCollectionDefaultPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { + + adjustItemPolicies(context, item, collection, replaceReadRPWithCollectionRP); + adjustBundleBitstreamPolicies(context, item, collection, replaceReadRPWithCollectionRP); - log.debug(LogManager.getHeader(context, "item_inheritCollectionDefaultPolicies", + log.debug(LogHelper.getHeader(context, "item_inheritCollectionDefaultPolicies", "item_id=" + item.getID())); } @Override public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { - List defaultCollectionPolicies = authorizeService + adjustBundleBitstreamPolicies(context, item, collection, true); + } + + @Override + public void adjustBundleBitstreamPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files + // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other + // policies or embargos applied + List defaultCollectionBundlePolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // Bitstreams should inherit from DEFAULT_BITSTREAM_READ + List defaultCollectionBitstreamPolicies = authorizeService .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); - if (defaultCollectionPolicies.size() < 1) { + List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, + ResourcePolicy.TYPE_CUSTOM); + if (defaultCollectionBitstreamPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() + " (" + collection.getHandle() + ")" + 
" has no default bitstream READ policies"); } + // TODO: should we also throw an exception if no DEFAULT_ITEM_READ? + + boolean removeCurrentReadRPBitstream = + replaceReadRPWithCollectionRP && defaultCollectionBitstreamPolicies.size() > 0; + boolean removeCurrentReadRPBundle = + replaceReadRPWithCollectionRP && defaultCollectionBundlePolicies.size() > 0; // remove all policies from bundles, add new ones // Remove bundles List bunds = item.getBundles(); for (Bundle mybundle : bunds) { + // If collection has default READ policies, remove the bundle's READ policies. + if (removeCurrentReadRPBundle) { + authorizeService.removePoliciesActionFilter(context, mybundle, Constants.READ); + } // if come from InstallItem: remove all submission/workflow policies authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_SUBMISSION); authorizeService.removeAllPoliciesByDSOAndType(context, mybundle, ResourcePolicy.TYPE_WORKFLOW); - addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionPolicies); + addCustomPoliciesNotInPlace(context, mybundle, defaultItemPolicies); + addDefaultPoliciesNotInPlace(context, mybundle, defaultCollectionBundlePolicies); for (Bitstream bitstream : mybundle.getBitstreams()) { + // If collection has default READ policies, remove the bundle's READ policies. 
+ if (removeCurrentReadRPBitstream) { + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + } + // if come from InstallItem: remove all submission/workflow policies - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); - authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); - addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, + defaultCollectionBitstreamPolicies); } } } + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection, Bitstream bitstream) + throws SQLException, AuthorizeException { + adjustBitstreamPolicies(context, item, collection, bitstream, true); + } + + @Override + public void adjustBitstreamPolicies(Context context, Item item, Collection collection , Bitstream bitstream, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { + List defaultCollectionPolicies = authorizeService + .getPoliciesActionFilter(context, collection, Constants.DEFAULT_BITSTREAM_READ); + + List defaultItemPolicies = authorizeService.findPoliciesByDSOAndType(context, item, + ResourcePolicy.TYPE_CUSTOM); + if (defaultCollectionPolicies.size() < 1) { + throw new SQLException("Collection " + collection.getID() + + " (" + collection.getHandle() + ")" + + " has no default bitstream READ policies"); + } + + // remove all policies from bitstream, add new ones + removeAllPoliciesAndAddDefault(context, bitstream, defaultItemPolicies, defaultCollectionPolicies); + } + + private void removeAllPoliciesAndAddDefault(Context context, Bitstream bitstream, + List defaultItemPolicies, + List defaultCollectionPolicies) + throws SQLException, AuthorizeException { + authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_SUBMISSION); + 
authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); + addCustomPoliciesNotInPlace(context, bitstream, defaultItemPolicies); + addDefaultPoliciesNotInPlace(context, bitstream, defaultCollectionPolicies); + } + @Override public void adjustItemPolicies(Context context, Item item, Collection collection) throws SQLException, AuthorizeException { + adjustItemPolicies(context, item, collection, true); + } + + @Override + public void adjustItemPolicies(Context context, Item item, Collection collection, + boolean replaceReadRPWithCollectionRP) + throws SQLException, AuthorizeException { // read collection's default READ policies List defaultCollectionPolicies = authorizeService .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ); + // If collection has defaultREAD policies, remove the item's READ policies. + if (replaceReadRPWithCollectionRP && defaultCollectionPolicies.size() > 0) { + authorizeService.removePoliciesActionFilter(context, item, Constants.READ); + } + // MUST have default policies if (defaultCollectionPolicies.size() < 1) { throw new SQLException("Collection " + collection.getID() @@ -869,6 +1084,12 @@ public void adjustItemPolicies(Context context, Item item, Collection collection @Override public void move(Context context, Item item, Collection from, Collection to) throws SQLException, AuthorizeException, IOException { + + // If the two collections are the same, do nothing. 
+ if (from.equals(to)) { + return; + } + // Use the normal move method, and default to not inherit permissions this.move(context, item, from, to, false); } @@ -890,7 +1111,7 @@ public void move(Context context, Item item, Collection from, Collection to, boo // If we are moving from the owning collection, update that too if (isOwningCollection(item, from)) { // Update the owning collection - log.info(LogManager.getHeader(context, "move_item", + log.info(LogHelper.getHeader(context, "move_item", "item_id=" + item.getID() + ", from " + "collection_id=" + from.getID() + " to " + "collection_id=" + to.getID())); @@ -898,7 +1119,7 @@ public void move(Context context, Item item, Collection from, Collection to, boo // If applicable, update the item policies if (inheritDefaultPolicies) { - log.info(LogManager.getHeader(context, "move_item", + log.info(LogHelper.getHeader(context, "move_item", "Updating item with inherited policies")); inheritCollectionDefaultPolicies(context, item, to); } @@ -937,7 +1158,7 @@ public List getCollectionsNotLinked(Context context, Item item) thro List linkedCollections = item.getCollections(); List notLinkedCollections = new ArrayList<>(allCollections.size() - linkedCollections.size()); - if ((allCollections.size() - linkedCollections.size()) == 0) { + if (allCollections.size() - linkedCollections.size() == 0) { return notLinkedCollections; } for (Collection collection : allCollections) { @@ -977,6 +1198,53 @@ public boolean canEdit(Context context, Item item) throws SQLException { return collectionService.canEditBoolean(context, item.getOwningCollection(), false); } + /** + * Finds all Indexed Items where the current user has edit rights. If the user is an Admin, + * this is all Indexed Items. 
Otherwise, it includes those Items where + * an indexed "edit" policy lists either the eperson or one of the eperson's groups + * + * @param context DSpace context + * @param discoverQuery + * @return discovery search result objects + * @throws SQLException if something goes wrong + * @throws SearchServiceException if search error + */ + private DiscoverResult retrieveItemsWithEdit(Context context, DiscoverQuery discoverQuery) + throws SQLException, SearchServiceException { + EPerson currentUser = context.getCurrentUser(); + if (!authorizeService.isAdmin(context)) { + String userId = currentUser != null ? "e" + currentUser.getID().toString() : "e"; + Stream groupIds = groupService.allMemberGroupsSet(context, currentUser).stream() + .map(group -> "g" + group.getID()); + String query = Stream.concat(Stream.of(userId), groupIds) + .collect(Collectors.joining(" OR ", "edit:(", ")")); + discoverQuery.addFilterQueries(query); + } + return searchService.search(context, discoverQuery); + } + + @Override + public List findItemsWithEdit(Context context, int offset, int limit) + throws SQLException, SearchServiceException { + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setStart(offset); + discoverQuery.setMaxResults(limit); + DiscoverResult resp = retrieveItemsWithEdit(context, discoverQuery); + return resp.getIndexableObjects().stream() + .map(solrItems -> ((IndexableItem) solrItems).getIndexedObject()) + .collect(Collectors.toList()); + } + + @Override + public int countItemsWithEdit(Context context) throws SQLException, SearchServiceException { + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setMaxResults(0); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + DiscoverResult resp = retrieveItemsWithEdit(context, discoverQuery); + return (int) resp.getTotalSearchResults(); + } + /** * Check if the item is an inprogress submission * @@ -985,6 +1253,7 @@ public 
boolean canEdit(Context context, Item item) throws SQLException { * @return true if the item is an inprogress submission, i.e. a WorkspaceItem or WorkflowItem * @throws SQLException An exception that provides information on a database access error or other errors. */ + @Override public boolean isInProgressSubmission(Context context, Item item) throws SQLException { return workspaceItemService.findByItem(context, item) != null || workflowItemService.findByItem(context, item) != null; @@ -1008,12 +1277,15 @@ public boolean isInProgressSubmission(Context context, Item item) throws SQLExce * to perform a particular action. */ protected void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso, - List defaultCollectionPolicies) - throws SQLException, AuthorizeException { + List defaultCollectionPolicies) throws SQLException, AuthorizeException { + boolean appendMode = configurationService + .getBooleanProperty("core.authorization.installitem.inheritance-read.append-mode", false); for (ResourcePolicy defaultPolicy : defaultCollectionPolicies) { if (!authorizeService .isAnIdenticalPolicyAlreadyInPlace(context, dso, defaultPolicy.getGroup(), Constants.READ, - defaultPolicy.getID())) { + defaultPolicy.getID()) && + (!appendMode && this.isNotAlreadyACustomRPOfThisTypeOnDSO(context, dso) || + appendMode && this.shouldBeAppended(context, dso, defaultPolicy))) { ResourcePolicy newPolicy = resourcePolicyService.clone(context, defaultPolicy); newPolicy.setdSpaceObject(dso); newPolicy.setAction(Constants.READ); @@ -1023,6 +1295,107 @@ protected void addDefaultPoliciesNotInPlace(Context context, DSpaceObject dso, } } + private void addCustomPoliciesNotInPlace(Context context, DSpaceObject dso, List customPolicies) + throws SQLException, AuthorizeException { + boolean customPoliciesAlreadyInPlace = authorizeService + .findPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM).size() > 0; + if (!customPoliciesAlreadyInPlace) { + authorizeService.addPolicies(context, 
customPolicies, dso); + } + } + + /** + * Check whether or not there is already an RP on the given dso, which has actionId={@link Constants.READ} and + * resourceTypeId={@link ResourcePolicy.TYPE_CUSTOM} + * + * @param context DSpace context + * @param dso DSpace object to check for custom read RP + * @return True if there is no RP on the item with custom read RP, otherwise false + * @throws SQLException If something goes wrong retrieving the RP on the DSO + */ + private boolean isNotAlreadyACustomRPOfThisTypeOnDSO(Context context, DSpaceObject dso) throws SQLException { + List readRPs = resourcePolicyService.find(context, dso, Constants.READ); + for (ResourcePolicy readRP : readRPs) { + if (readRP.getRpType() != null && readRP.getRpType().equals(ResourcePolicy.TYPE_CUSTOM)) { + return false; + } + } + return true; + } + + /** + * Check if the provided default policy should be appended or not to the final + * item. If an item has at least one custom READ policy any anonymous READ + * policy with empty start/end date should be skipped + * + * @param context DSpace context + * @param dso DSpace object to check for custom read RP + * @param defaultPolicy The policy to check + * @return + * @throws SQLException If something goes wrong retrieving the RP on the DSO + */ + private boolean shouldBeAppended(Context context, DSpaceObject dso, ResourcePolicy defaultPolicy) + throws SQLException { + boolean hasCustomPolicy = resourcePolicyService.find(context, dso, Constants.READ) + .stream() + .filter(rp -> (Objects.nonNull(rp.getRpType()) && + Objects.equals(rp.getRpType(), ResourcePolicy.TYPE_CUSTOM))) + .findFirst() + .isPresent(); + + boolean isAnonimousGroup = Objects.nonNull(defaultPolicy.getGroup()) + && StringUtils.equals(defaultPolicy.getGroup().getName(), Group.ANONYMOUS); + + boolean datesAreNull = Objects.isNull(defaultPolicy.getStartDate()) + && Objects.isNull(defaultPolicy.getEndDate()); + + return !(hasCustomPolicy && isAnonimousGroup && datesAreNull); + } + + 
/** + * Returns an iterator of Items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param schema metadata field schema + * @param element metadata field element + * @param qualifier metadata field qualifier + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that authority value + * @throws SQLException if database error + * An exception that provides information on a database access error or other errors. + * @throws AuthorizeException if authorization error + * Exception indicating the current user of the context does not have permission + * to perform a particular action. + */ + @Override + public Iterator findArchivedByMetadataField(Context context, + String schema, String element, String qualifier, String value) + throws SQLException, AuthorizeException { + MetadataSchema mds = metadataSchemaService.find(context, schema); + if (mds == null) { + throw new IllegalArgumentException("No such metadata schema: " + schema); + } + MetadataField mdf = metadataFieldService.findByElement(context, mds, element, qualifier); + if (mdf == null) { + throw new IllegalArgumentException( + "No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier); + } + + if (Item.ANY.equals(value)) { + return itemDAO.findByMetadataField(context, mdf, null, true); + } else { + return itemDAO.findByMetadataField(context, mdf, value, true); + } + } + + @Override + public Iterator findArchivedByMetadataField(Context context, String metadataField, String value) + throws SQLException, AuthorizeException { + String[] mdValueByField = getMDValueByField(metadataField); + return findArchivedByMetadataField(context, mdValueByField[0], mdValueByField[1], mdValueByField[2], value); + } + /** * Returns an iterator of Items possessing the passed metadata field, or only * those matching the passed value, if 
value is not Item.ANY @@ -1062,16 +1435,6 @@ public Iterator findByMetadataField(Context context, } } - @Override - public Iterator findByMetadataQuery(Context context, List> listFieldList, - List query_op, List query_val, List collectionUuids, - String regexClause, int offset, int limit) - throws SQLException, AuthorizeException, IOException { - return itemDAO - .findByMetadataQuery(context, listFieldList, query_op, query_val, collectionUuids, regexClause, offset, - limit); - } - @Override public DSpaceObject getAdminObject(Context context, Item item, int action) throws SQLException { DSpaceObject adminObject = null; @@ -1245,10 +1608,15 @@ protected void getAuthoritiesAndConfidences(String fieldKey, Collection collecti @Override public Item findByIdOrLegacyId(Context context, String id) throws SQLException { - if (StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUID.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } @@ -1343,7 +1711,7 @@ public List getMetadata(Item item, String schema, String element, fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true)); fullMetadataValueList.addAll(dbMetadataValues); - item.setCachedMetadata(sortMetadataValueList(fullMetadataValueList)); + item.setCachedMetadata(MetadataValueComparators.sort(fullMetadataValueList)); } log.debug("Called getMetadata for " + item.getID() + " based on cache"); @@ -1385,28 +1753,6 @@ protected void moveSingleMetadataValue(Context context, Item dso, int place, Met } } - /** - * This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element, - * MetadataField Qualifier and MetadataField Place in that order. 
- * @param listToReturn The list to be sorted - * @return The list sorted on those criteria - */ - private List sortMetadataValueList(List listToReturn) { - Comparator comparator = Comparator.comparing( - metadataValue -> metadataValue.getMetadataField().getMetadataSchema().getName(), - Comparator.nullsFirst(Comparator.naturalOrder())); - comparator = comparator.thenComparing(metadataValue -> metadataValue.getMetadataField().getElement(), - Comparator.nullsFirst(Comparator.naturalOrder())); - comparator = comparator.thenComparing(metadataValue -> metadataValue.getMetadataField().getQualifier(), - Comparator.nullsFirst(Comparator.naturalOrder())); - comparator = comparator.thenComparing(metadataValue -> metadataValue.getPlace(), - Comparator.nullsFirst(Comparator.naturalOrder())); - - Stream metadataValueStream = listToReturn.stream().sorted(comparator); - listToReturn = metadataValueStream.collect(Collectors.toList()); - return listToReturn; - } - @Override public MetadataValue addMetadata(Context context, Item dso, String schema, String element, String qualifier, String lang, String value, String authority, int confidence, int place) throws SQLException { @@ -1427,5 +1773,100 @@ public MetadataValue addMetadata(Context context, Item dso, String schema, Strin .stream().findFirst().orElse(null); } + @Override + public String getEntityTypeLabel(Item item) { + List mdvs = getMetadata(item, "dspace", "entity", "type", Item.ANY, false); + if (mdvs.isEmpty()) { + return null; + } + + if (mdvs.size() > 1) { + log.warn( + "Item with uuid {}, handle {} has {} entity types ({}), expected 1 entity type", + item.getID(), item.getHandle(), mdvs.size(), + mdvs.stream().map(MetadataValue::getValue).collect(Collectors.toList()) + ); + } + + String entityType = mdvs.get(0).getValue(); + if (StringUtils.isBlank(entityType)) { + return null; + } + + return entityType; + } + + @Override + public EntityType getEntityType(Context context, Item item) throws SQLException { + String 
entityTypeString = getEntityTypeLabel(item); + if (StringUtils.isBlank(entityTypeString)) { + return null; + } + + return entityTypeService.findByEntityType(context, entityTypeString); + } + + private void removeOrcidSynchronizationStuff(Context context, Item item) throws SQLException, AuthorizeException { + + if (isNotProfileOrOrcidEntity(item)) { + return; + } + + context.turnOffAuthorisationSystem(); + + try { + + createOrcidQueueRecordsToDeleteOnOrcid(context, item); + deleteOrcidHistoryRecords(context, item); + deleteOrcidQueueRecords(context, item); + + } finally { + context.restoreAuthSystemState(); + } + + } + + private boolean isNotProfileOrOrcidEntity(Item item) { + String entityType = getEntityTypeLabel(item); + return !OrcidEntityType.isValidEntityType(entityType) + && !researcherProfileService.getProfileType().equals(entityType); + } + + private void createOrcidQueueRecordsToDeleteOnOrcid(Context context, Item entity) throws SQLException { + + String entityType = getEntityTypeLabel(entity); + if (entityType == null || researcherProfileService.getProfileType().equals(entityType)) { + return; + } + + Map profileAndPutCodeMap = orcidHistoryService.findLastPutCodes(context, entity); + for (Item profile : profileAndPutCodeMap.keySet()) { + if (orcidSynchronizationService.isSynchronizationAllowed(profile, entity)) { + String putCode = profileAndPutCodeMap.get(profile); + String title = getMetadataFirstValue(entity, "dc", "title", null, Item.ANY); + orcidQueueService.createEntityDeletionRecord(context, profile, title, entityType, putCode); + } + } + + } + + private void deleteOrcidHistoryRecords(Context context, Item item) throws SQLException { + List historyRecords = orcidHistoryService.findByProfileItemOrEntity(context, item); + for (OrcidHistory historyRecord : historyRecords) { + if (historyRecord.getProfileItem().equals(item)) { + orcidHistoryService.delete(context, historyRecord); + } else { + historyRecord.setEntity(null); + 
orcidHistoryService.update(context, historyRecord); + } + } + } + + private void deleteOrcidQueueRecords(Context context, Item item) throws SQLException { + List orcidQueueRecords = orcidQueueService.findByProfileItemOrEntity(context, item); + for (OrcidQueue orcidQueueRecord : orcidQueueRecords) { + orcidQueueService.delete(context, orcidQueueRecord); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java b/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java index be804a9bbb94..673a30d2ddfc 100644 --- a/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java +++ b/dspace-api/src/main/java/org/dspace/content/LicenseUtils.java @@ -59,7 +59,7 @@ private LicenseUtils() { } * {6} the eperson object that will be formatted using the appropriate * LicenseArgumentFormatter plugin (if defined)
    * {x} any addition argument supplied wrapped in the - * LicenseArgumentFormatter based on his type (map key) + * LicenseArgumentFormatter based on its type (map key) * * @param locale Formatter locale * @param collection collection to get license from diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java index 7dad2117d553..8bc34d3f5ed1 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataDSpaceCsvExportServiceImpl.java @@ -9,12 +9,12 @@ import java.sql.SQLException; import java.util.ArrayList; -import java.util.Collections; +import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Set; import java.util.UUID; -import com.google.common.collect.Iterators; import org.dspace.app.bulkedit.DSpaceCSV; import org.dspace.app.util.service.DSpaceObjectUtils; import org.dspace.content.service.ItemService; @@ -102,40 +102,36 @@ public DSpaceCSV export(Context context, Community community, boolean exportAll) } /** - * Build an array list of item ids that are in a community (include sub-communities and collections) + * Build a Java Collection of item IDs that are in a Community (including + * its sub-Communities and Collections) * * @param context DSpace context * @param community The community to build from - * @return The list of item ids + * @return Iterator over the Collection of item ids * @throws SQLException if database error */ private Iterator buildFromCommunity(Context context, Community community) throws SQLException { + Set result = new HashSet<>(); + // Add all the collections List collections = community.getCollections(); - Iterator result = Collections.emptyIterator(); for (Collection collection : collections) { Iterator items = itemService.findByCollection(context, collection); - result = 
addItemsToResult(result, items); - + while (items.hasNext()) { + result.add(items.next()); + } } - // Add all the sub-communities + + // Add all the sub-communities List communities = community.getSubcommunities(); for (Community subCommunity : communities) { Iterator items = buildFromCommunity(context, subCommunity); - result = addItemsToResult(result, items); - } - - return result; - } - - private Iterator addItemsToResult(Iterator result, Iterator items) { - if (result == null) { - result = items; - } else { - result = Iterators.concat(result, items); + while (items.hasNext()) { + result.add(items.next()); + } } - return result; + return result.iterator(); } } diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataField.java b/dspace-api/src/main/java/org/dspace/content/MetadataField.java index 0ea176c75127..8b767011999e 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataField.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataField.java @@ -32,7 +32,6 @@ * metadata element belongs in a field. 
* * @author Martin Hald - * @version $Revision$ * @see org.dspace.content.MetadataValue * @see org.dspace.content.MetadataSchema */ @@ -77,6 +76,7 @@ protected MetadataField() { * * @return metadata field id */ + @Override public Integer getID() { return id; } @@ -164,7 +164,7 @@ public void setMetadataSchema(MetadataSchema metadataSchema) { */ @Override public boolean equals(Object obj) { - if (obj == null) { + if (!(obj instanceof MetadataField)) { return false; } Class objClass = HibernateProxyHelper.getClassWithoutInitializingProxy(obj); @@ -175,10 +175,7 @@ public boolean equals(Object obj) { if (!this.getID().equals(other.getID())) { return false; } - if (!getMetadataSchema().equals(other.getMetadataSchema())) { - return false; - } - return true; + return getMetadataSchema().equals(other.getMetadataSchema()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataFieldName.java b/dspace-api/src/main/java/org/dspace/content/MetadataFieldName.java index 8c3dfc1bccf8..8d7f4b027733 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataFieldName.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataFieldName.java @@ -17,13 +17,13 @@ */ public class MetadataFieldName { /** Name of the metadata schema which defines this field. Never null. */ - public final String SCHEMA; + public final String schema; /** Element name of this field. Never null. */ - public final String ELEMENT; + public final String element; /** Qualifier name of this field. May be {@code null}. */ - public final String QUALIFIER; + public final String qualifier; /** * Initialize a tuple of (schema, element, qualifier) to name a metadata field. 
@@ -40,9 +40,9 @@ public MetadataFieldName(@Nonnull String schema, @Nonnull String element, String throw new NullPointerException("Element must not be null."); } - SCHEMA = schema; - ELEMENT = element; - QUALIFIER = qualifier; + this.schema = schema; + this.element = element; + this.qualifier = qualifier; } /** @@ -59,9 +59,9 @@ public MetadataFieldName(@Nonnull String schema, @Nonnull String element) { throw new NullPointerException("Element must not be null."); } - SCHEMA = schema; - ELEMENT = element; - QUALIFIER = null; + this.schema = schema; + this.element = element; + qualifier = null; } /** @@ -79,9 +79,9 @@ public MetadataFieldName(@Nonnull MetadataSchemaEnum schema, @Nonnull String ele throw new IllegalArgumentException("Element must not be null."); } - SCHEMA = schema.getName(); - ELEMENT = element; - QUALIFIER = qualifier; + this.schema = schema.getName(); + this.element = element; + this.qualifier = qualifier; } /** @@ -98,9 +98,9 @@ public MetadataFieldName(@Nonnull MetadataSchemaEnum schema, @Nonnull String ele throw new IllegalArgumentException("Element must not be null."); } - SCHEMA = schema.getName(); - ELEMENT = element; - QUALIFIER = null; + this.schema = schema.getName(); + this.element = element; + qualifier = null; } /** @@ -110,9 +110,9 @@ public MetadataFieldName(@Nonnull MetadataSchemaEnum schema, @Nonnull String ele */ public MetadataFieldName(@Nonnull String name) { String[] elements = parse(name); - SCHEMA = elements[0]; - ELEMENT = elements[1]; - QUALIFIER = elements[2]; + schema = elements[0]; + element = elements[1]; + qualifier = elements[2]; } /** @@ -138,17 +138,17 @@ public static String[] parse(@Nonnull String name) { /** * Format a dotted-atoms representation of this field name. 
- * @return SCHEMA.ELEMENT.QUALIFIER + * @return schema.element.qualifier */ @Override public String toString() { StringBuilder buffer = new StringBuilder(32); - buffer.append(SCHEMA) + buffer.append(schema) .append('.') - .append(ELEMENT); - if (null != QUALIFIER) { + .append(element); + if (null != qualifier) { buffer.append('.') - .append(QUALIFIER); + .append(qualifier); } return buffer.toString(); } diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java index 569b5840c6d1..254cff0266c2 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java @@ -25,7 +25,7 @@ import org.dspace.content.service.SiteService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.discovery.indexobject.IndexableMetadataField; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -83,7 +83,7 @@ public MetadataField create(Context context, MetadataSchema metadataSchema, Stri metadataField = metadataFieldDAO.create(context, metadataField); metadataFieldDAO.save(context, metadataField); - log.info(LogManager.getHeader(context, "create_metadata_field", + log.info(LogHelper.getHeader(context, "create_metadata_field", "metadata_field_id=" + metadataField.getID())); // Update the index of type metadatafield this.triggerEventToUpdateIndex(context, metadataField.getID()); @@ -155,7 +155,7 @@ public void update(Context context, MetadataField metadataField) metadataFieldDAO.save(context, metadataField); - log.info(LogManager.getHeader(context, "update_metadatafieldregistry", + log.info(LogHelper.getHeader(context, "update_metadatafieldregistry", "metadata_field_id=" + metadataField.getID() + "element=" + metadataField .getElement() + 
"qualifier=" + metadataField.getQualifier())); @@ -187,7 +187,7 @@ public void delete(Context context, MetadataField metadataField) throws SQLExcep .toString() + " cannot be deleted as it is currently used by one or more objects."); } - log.info(LogManager.getHeader(context, "delete_metadata_field", + log.info(LogHelper.getHeader(context, "delete_metadata_field", "metadata_field_id=" + metadataField.getID())); // Update the index of type metadatafield this.triggerEventToUpdateIndex(context, metadataField.getID()); diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java index 727181ee9df0..f60e5e1604cf 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchema.java @@ -30,7 +30,6 @@ *

    * * @author Martin Hald - * @version $Revision$ * @see org.dspace.content.MetadataValue * @see org.dspace.content.MetadataField */ @@ -129,6 +128,7 @@ public void setName(String name) { * * @return schema record key */ + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java index deca62566aae..559e3bf5cf5a 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaEnum.java @@ -16,7 +16,8 @@ public enum MetadataSchemaEnum { DC("dc"), EPERSON("eperson"), - RELATION("relation"); + RELATION("relation"), + PERSON("person"); /** * The String representation of the MetadataSchemaEnum diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaServiceImpl.java index d5c2c22f884c..2eeb57a395e2 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataSchemaServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataSchemaServiceImpl.java @@ -17,7 +17,7 @@ import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.springframework.beans.factory.annotation.Autowired; /** @@ -74,7 +74,7 @@ public MetadataSchema create(Context context, String name, String namespace) metadataSchema.setNamespace(namespace); metadataSchema.setName(name); metadataSchemaDAO.save(context, metadataSchema); - log.info(LogManager.getHeader(context, "create_metadata_schema", + log.info(LogHelper.getHeader(context, "create_metadata_schema", "metadata_schema_id=" + metadataSchema.getID())); return metadataSchema; @@ -106,7 +106,7 @@ public void update(Context context, MetadataSchema metadataSchema) + " 
unique"); } metadataSchemaDAO.save(context, metadataSchema); - log.info(LogManager.getHeader(context, "update_metadata_schema", + log.info(LogHelper.getHeader(context, "update_metadata_schema", "metadata_schema_id=" + metadataSchema.getID() + "namespace=" + metadataSchema.getNamespace() + "name=" + metadataSchema.getName())); } @@ -125,7 +125,7 @@ public void delete(Context context, MetadataSchema metadataSchema) throws SQLExc metadataSchemaDAO.delete(context, metadataSchema); - log.info(LogManager.getHeader(context, "delete_metadata_schema", + log.info(LogHelper.getHeader(context, "delete_metadata_schema", "metadata_schema_id=" + metadataSchema.getID())); } diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index 2d9808ae454c..31479e620618 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -19,6 +19,7 @@ import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import javax.persistence.Transient; import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; @@ -46,7 +47,7 @@ public class MetadataValue implements ReloadableEntity { @Column(name = "metadata_value_id") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "metadatavalue_seq") @SequenceGenerator(name = "metadatavalue_seq", sequenceName = "metadatavalue_seq", allocationSize = 1) - private Integer id; + private final Integer id; /** * The primary key for the metadata value @@ -59,7 +60,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.hibernate.type.MaterializedClobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "text_value") private String value; @@ -104,6 +105,7 @@ protected MetadataValue() { * * @return metadata value ID */ + @Override public Integer getID() 
{ return id; } @@ -170,6 +172,14 @@ public void setMetadataField(MetadataField metadataField) { this.metadataField = metadataField; } + /** + * @return {@code MetadataField#getID()} + */ + @Transient + protected Integer getMetadataFieldId() { + return getMetadataField().getID(); + } + /** * Get the metadata value. * @@ -249,10 +259,7 @@ public boolean equals(Object obj) { if (!this.getID().equals(other.getID())) { return false; } - if (!this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID())) { - return false; - } - return true; + return this.getDSpaceObject().getID().equals(other.getDSpaceObject().getID()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java b/dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java new file mode 100644 index 000000000000..306258f36a64 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValueComparators.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + +/** + * This class contains only static members that can be used + * to sort list of {@link MetadataValue} + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public final class MetadataValueComparators { + + private MetadataValueComparators() {} + + /** + * This is the default comparator that mimics the ordering + * applied by the standard {@code @OrderBy} annotation inside + * {@link DSpaceObject#getMetadata()} + */ + public static final Comparator defaultComparator = + Comparator.comparing(MetadataValue::getMetadataFieldId) + .thenComparing( + MetadataValue::getPlace, + Comparator.nullsFirst(Comparator.naturalOrder()) + ); + + 
/** + * This method creates a new {@code List} ordered by the + * {@code MetadataComparators#defaultComparator}. + * + * @param metadataValues + * @return {@code List} ordered copy list using stream. + */ + public static final List sort(List metadataValues) { + return metadataValues + .stream() + .sorted(MetadataValueComparators.defaultComparator) + .collect(Collectors.toList()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValueServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataValueServiceImpl.java index 2451e6a8e6a1..0c34c04f3051 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValueServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValueServiceImpl.java @@ -21,7 +21,7 @@ import org.dspace.content.service.MetadataValueService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.springframework.beans.factory.annotation.Autowired; /** @@ -55,6 +55,8 @@ public MetadataValue create(Context context, DSpaceObject dso, MetadataField met //An update here isn't needed, this is persited upon the merge of the owning object // metadataValueDAO.save(context, metadataValue); metadataValue = metadataValueDAO.create(context, metadataValue); + log.info(LogHelper.getHeader(context, "add_metadatavalue", + "metadata_value_id=" + metadataValue.getID())); return metadataValue; } @@ -80,7 +82,7 @@ public Iterator findByFieldAndValue(Context context, MetadataFiel @Override public void update(Context context, MetadataValue metadataValue) throws SQLException { metadataValueDAO.save(context, metadataValue); - log.info(LogManager.getHeader(context, "update_metadatavalue", + log.info(LogHelper.getHeader(context, "update_metadatavalue", "metadata_value_id=" + metadataValue.getID())); } @@ -102,7 +104,7 @@ public void update(Context context, MetadataValue metadataValue, boolean updateL @Override public void 
delete(Context context, MetadataValue metadataValue) throws SQLException { - log.info(LogManager.getHeader(context, "delete_metadata_value", + log.info(LogHelper.getHeader(context, "delete_metadata_value", " metadata_value_id=" + metadataValue.getID())); metadataValueDAO.delete(context, metadataValue); } diff --git a/dspace-api/src/main/java/org/dspace/content/PredefinedUUIDGenerator.java b/dspace-api/src/main/java/org/dspace/content/PredefinedUUIDGenerator.java new file mode 100644 index 000000000000..aa4a8ea5429c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/PredefinedUUIDGenerator.java @@ -0,0 +1,33 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import java.io.Serializable; +import java.util.UUID; + +import org.hibernate.engine.spi.SharedSessionContractImplementor; +import org.hibernate.id.UUIDGenerator; + +/** + * Allows DSpaceObjects to provide a pre-determined UUID + * + * @author April Herron + */ +public class PredefinedUUIDGenerator extends UUIDGenerator { + + @Override + public Serializable generate(SharedSessionContractImplementor session, Object object) { + if (object instanceof DSpaceObject) { + UUID uuid = ((DSpaceObject) object).getPredefinedUUID(); + if (uuid != null) { + return uuid; + } + } + return super.generate(session, object); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/QAEvent.java b/dspace-api/src/main/java/org/dspace/content/QAEvent.java new file mode 100644 index 000000000000..9e90f81be32c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/QAEvent.java @@ -0,0 +1,213 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + 
* http://www.dspace.org/license/ + */ +package org.dspace.content; + +import java.io.UnsupportedEncodingException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Date; + +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.dspace.qaevent.service.dto.OpenaireMessageDTO; +import org.dspace.qaevent.service.dto.QAMessageDTO; +import org.dspace.util.RawJsonDeserializer; + +/** + * This class represent the Quality Assurance broker data as loaded in our solr + * qaevent core + * + */ +public class QAEvent { + public static final char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', + 'f' }; + public static final String ACCEPTED = "accepted"; + public static final String REJECTED = "rejected"; + public static final String DISCARDED = "discarded"; + + public static final String OPENAIRE_SOURCE = "openaire"; + + private String source; + + private String eventId; + /** + * contains the targeted dspace object, + * ie: oai:www.openstarts.units.it:123456789/1120 contains the handle + * of the DSpace pbject in its final part 123456789/1120 + * */ + private String originalId; + + /** + * evaluated with the targeted dspace object id + * + * */ + private String target; + + private String related; + + private String title; + + private String topic; + + private double trust; + + @JsonDeserialize(using = RawJsonDeserializer.class) + private String message; + + private Date lastUpdate; + + private String status = "PENDING"; + + public QAEvent() { + } + + public QAEvent(String source, String originalId, String target, String title, + String topic, double trust, String message, Date lastUpdate) { + super(); + this.source = source; + this.originalId = originalId; + this.target = target; + this.title = title; + this.topic = topic; + this.trust = trust; + this.message = message; + this.lastUpdate = lastUpdate; + try { + computedEventId(); + } catch 
(NoSuchAlgorithmException | UnsupportedEncodingException e) { + throw new IllegalStateException(e); + } + + } + + public String getOriginalId() { + return originalId; + } + + public void setOriginalId(String originalId) { + this.originalId = originalId; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getTopic() { + return topic; + } + + public void setTopic(String topic) { + this.topic = topic; + } + + public double getTrust() { + return trust; + } + + public void setTrust(double trust) { + this.trust = trust; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getEventId() { + if (eventId == null) { + try { + computedEventId(); + } catch (NoSuchAlgorithmException | UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } + return eventId; + } + + public void setEventId(String eventId) { + this.eventId = eventId; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + public Date getLastUpdate() { + return lastUpdate; + } + + public void setLastUpdate(Date lastUpdate) { + this.lastUpdate = lastUpdate; + } + + public void setRelated(String related) { + this.related = related; + } + + public String getRelated() { + return related; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getStatus() { + return status; + } + + public String getSource() { + return source != null ? source : OPENAIRE_SOURCE; + } + + public void setSource(String source) { + this.source = source; + } + + /* + * DTO constructed via Jackson use empty constructor. In this case, the eventId + * must be compute on the get method. This method create a signature based on + * the event fields and store it in the eventid attribute. 
+ */ + private void computedEventId() throws NoSuchAlgorithmException, UnsupportedEncodingException { + MessageDigest digester = MessageDigest.getInstance("MD5"); + String dataToString = "source=" + source + ",originalId=" + originalId + ", title=" + title + ", topic=" + + topic + ", trust=" + trust + ", message=" + message; + digester.update(dataToString.getBytes("UTF-8")); + byte[] signature = digester.digest(); + char[] arr = new char[signature.length << 1]; + for (int i = 0; i < signature.length; i++) { + int b = signature[i]; + int idx = i << 1; + arr[idx] = HEX_DIGITS[(b >> 4) & 0xf]; + arr[idx + 1] = HEX_DIGITS[b & 0xf]; + } + eventId = new String(arr); + + } + + public Class getMessageDtoClass() { + switch (getSource()) { + case OPENAIRE_SOURCE: + return OpenaireMessageDTO.class; + default: + throw new IllegalArgumentException("Unknown event's source: " + getSource()); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/QAEventProcessed.java b/dspace-api/src/main/java/org/dspace/content/QAEventProcessed.java new file mode 100644 index 000000000000..3631a2ff68c6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/QAEventProcessed.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import java.io.Serializable; +import java.util.Date; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +import org.dspace.eperson.EPerson; + +/** + * This class represent the stored information about processed notification + * broker events + * + */ +@Entity +@Table(name = "qaevent_processed") +public 
class QAEventProcessed implements Serializable { + + private static final long serialVersionUID = 3427340199132007814L; + + @Id + @Column(name = "qaevent_id") + private String eventId; + + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "qaevent_timestamp") + private Date eventTimestamp; + + @JoinColumn(name = "eperson_uuid") + @ManyToOne + private EPerson eperson; + + @JoinColumn(name = "item_uuid") + @ManyToOne + private Item item; + + public String getEventId() { + return eventId; + } + + public void setEventId(String eventId) { + this.eventId = eventId; + } + + public Date getEventTimestamp() { + return eventTimestamp; + } + + public void setEventTimestamp(Date eventTimestamp) { + this.eventTimestamp = eventTimestamp; + } + + public EPerson getEperson() { + return eperson; + } + + public void setEperson(EPerson eperson) { + this.eperson = eperson; + } + + public Item getItem() { + return item; + } + + public void setItem(Item item) { + this.item = item; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/Relationship.java b/dspace-api/src/main/java/org/dspace/content/Relationship.java index 72ffbedff0d5..77c418a23dea 100644 --- a/dspace-api/src/main/java/org/dspace/content/Relationship.java +++ b/dspace-api/src/main/java/org/dspace/content/Relationship.java @@ -89,6 +89,15 @@ public class Relationship implements ReloadableEntity { @Column(name = "rightward_value") private String rightwardValue; + /** + * Whether the left and/or right side of a given relationship are the "latest". + * A side of a relationship is "latest" if the item on that side has either no other versions, + * or the item on that side is the most recent version that is relevant to the given relationship. + * This column affects what version of an item appears on search pages or the relationship listings of other items. 
+ */ + @Column(name = "latest_version_status") + private LatestVersionStatus latestVersionStatus = LatestVersionStatus.BOTH; + /** * Protected constructor, create object using: * {@link org.dspace.content.service.RelationshipService#create(Context)} } @@ -216,10 +225,44 @@ public void setRightwardValue(String rightwardValue) { this.rightwardValue = rightwardValue; } + /** + * Getter for {@link #latestVersionStatus}. + * @return the latest version status of this relationship. + */ + public LatestVersionStatus getLatestVersionStatus() { + return latestVersionStatus; + } + + /** + * Setter for {@link #latestVersionStatus}. + * @param latestVersionStatus the new latest version status for this relationship. + */ + public void setLatestVersionStatus(LatestVersionStatus latestVersionStatus) { + if (this.latestVersionStatus == latestVersionStatus) { + return; // no change or cache reset needed + } + + this.latestVersionStatus = latestVersionStatus; + + // on one item, relation.* fields will change + // on the other item, relation.*.latestForDiscovery will change + leftItem.setMetadataModified(); + rightItem.setMetadataModified(); + } + + public enum LatestVersionStatus { + // NOTE: SQL migration expects BOTH to be the first constant in this enum! 
+ BOTH, // both items in this relationship are the "latest" + LEFT_ONLY, // the left-hand item of this relationship is the "latest", but the right-hand item is not + RIGHT_ONLY // the right-hand item of this relationship is the "latest", but the left-hand item is not + // NOTE: one side of any given relationship should ALWAYS be the "latest" + } + /** * Standard getter for the ID for this Relationship * @return The ID of this relationship */ + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java index 38b0d18bd92f..c3570ad47e9d 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataService.java @@ -56,7 +56,9 @@ public List findRelationshipMetadataValueForItemRelat * This method will retrieve the EntityType String from an item * @param item The Item for which the entityType String will be returned * @return A String value indicating the entityType + * @deprecated use {@link org.dspace.content.service.ItemService#getEntityTypeLabel(Item)} instead. 
*/ + @Deprecated public String getEntityTypeStringFromMetadata(Item item); } diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java index f8b756a1eaf0..c6cf21a55fc7 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataServiceImpl.java @@ -7,16 +7,24 @@ */ package org.dspace.content; +import static org.dspace.content.RelationshipType.Tilted.LEFT; +import static org.dspace.content.RelationshipType.Tilted.RIGHT; + import java.sql.SQLException; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; +import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.virtual.VirtualMetadataConfiguration; import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.core.Constants; @@ -33,6 +41,12 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ @Autowired(required = true) protected RelationshipService relationshipService; + @Autowired(required = true) + protected RelationshipTypeService relationshipTypeService; + + @Autowired(required = true) + protected ItemService itemService; + @Autowired(required = true) protected VirtualMetadataPopulator virtualMetadataPopulator; @@ -44,12 +58,25 @@ public List getRelationshipMetadata(Item item, boolea Context context = new Context(); List fullMetadataValueList = new LinkedList<>(); try { - String entityType = 
getEntityTypeStringFromMetadata(item); - if (StringUtils.isNotBlank(entityType)) { + EntityType entityType = itemService.getEntityType(context, item); + if (entityType != null) { + // NOTE: The following code will add metadata fields of type relation.*.latestForDiscovery + // (e.g. relation.isAuthorOfPublication.latestForDiscovery). + // These fields contain the UUIDs of the items that have a relationship with current item, + // from the perspective of the other item. In other words, given a relationship with this item, + // the current item should have "latest status" in order for the other item to appear in + // relation.*.latestForDiscovery fields. + fullMetadataValueList.addAll(findLatestForDiscoveryMetadataValues(context, item, entityType)); + + // NOTE: The following code will, among other things, + // add metadata fields of type relation.* (e.g. relation.isAuthorOfPublication). + // These fields contain the UUIDs of the items that have a relationship with current item, + // from the perspective of this item. In other words, given a relationship with this item, + // the other item should have "latest status" in order to appear in relation.* fields. 
List relationships = relationshipService.findByItem(context, item, -1, -1, true); for (Relationship relationship : relationships) { fullMetadataValueList - .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType, + .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType.getLabel(), relationship, enableVirtualMetadata)); } @@ -60,16 +87,91 @@ public List getRelationshipMetadata(Item item, boolea return fullMetadataValueList; } - public String getEntityTypeStringFromMetadata(Item item) { - List list = item.getMetadata(); - for (MetadataValue mdv : list) { - if (StringUtils.equals(mdv.getMetadataField().getMetadataSchema().getName(), "dspace") - && StringUtils.equals(mdv.getMetadataField().getElement(), "entity") - && StringUtils.equals(mdv.getMetadataField().getQualifier(), "type")) { - return mdv.getValue(); + /** + * Create the list of relation.*.latestForDiscovery virtual metadata values for the given item. + * @param context the DSpace context. + * @param item the item. + * @param itemEntityType the entity type of the item. + * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery. + */ + protected List findLatestForDiscoveryMetadataValues( + Context context, Item item, EntityType itemEntityType + ) throws SQLException { + final String schema = MetadataSchemaEnum.RELATION.getName(); + final String qualifier = "latestForDiscovery"; + + List mdvs = new LinkedList<>(); + + List relationshipTypes = relationshipTypeService.findByEntityType(context, itemEntityType); + for (RelationshipType relationshipType : relationshipTypes) { + // item is on left side of this relationship type + // NOTE: On the left item, we should index the uuids of the right items. If the relationship type is + // "tilted right", it means that we expect a huge amount of right items, so we don't index their uuids + // on the left item as a storage/performance improvement. 
+ // As a consequence, when searching for related items (using discovery) + // on the pages of the right items you won't be able to find the left item. + if (relationshipType.getTilted() != RIGHT + && Objects.equals(relationshipType.getLeftType(), itemEntityType)) { + String element = relationshipType.getLeftwardType(); + List data = relationshipService + .findByLatestItemAndRelationshipType(context, item, relationshipType, true); + mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data)); + } + + // item is on right side of this relationship type + // NOTE: On the right item, we should index the uuids of the left items. If the relationship type is + // "tilted left", it means that we expect a huge amount of left items, so we don't index their uuids + // on the right item as a storage/performance improvement. + // As a consequence, when searching for related items (using discovery) + // on the pages of the left items you won't be able to find the right item. + if (relationshipType.getTilted() != LEFT && relationshipType.getRightType().equals(itemEntityType)) { + String element = relationshipType.getRightwardType(); + List data = relationshipService + .findByLatestItemAndRelationshipType(context, item, relationshipType, false); + mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data)); } } - return null; + + return mdvs; + } + + /** + * Turn the given data into a list of relation.*.latestForDiscovery virtual metadata values. + * @param context the DSpace context. + * @param schema the schema for all metadata values. + * @param element the element for all metadata values. + * @param qualifier the qualifier for all metadata values. + * @param data a POJO containing the item uuid and relationship id. + * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery. 
+ */ + protected List constructLatestForDiscoveryMetadataValues( + Context context, String schema, String element, String qualifier, List data + ) { + String mdf = new MetadataFieldName(schema, element, qualifier).toString(); + + return data.stream() + .map(datum -> { + RelationshipMetadataValue mdv = constructMetadataValue(context, mdf); + if (mdv == null) { + return null; + } + + mdv.setAuthority(Constants.VIRTUAL_AUTHORITY_PREFIX + datum.getRelationshipId()); + mdv.setValue(datum.getItemUuid().toString()); + // NOTE: place has no meaning for relation.*.latestForDiscovery metadata fields + mdv.setPlace(-1); + mdv.setUseForPlace(false); + + return mdv; + }) + .filter(Objects::nonNull) + .collect(Collectors.toUnmodifiableList()); + } + + @Override + @Deprecated + public String getEntityTypeStringFromMetadata(Item item) { + return itemService.getEntityTypeLabel(item); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java index 1b419da81631..1fdfde6c7462 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipServiceImpl.java @@ -8,10 +8,13 @@ package org.dspace.content; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; -import java.util.LinkedList; +import java.util.HashMap; import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -19,15 +22,19 @@ import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Relationship.LatestVersionStatus; import org.dspace.content.dao.RelationshipDAO; +import 
org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.ItemService; import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; +import org.dspace.content.virtual.VirtualMetadataConfiguration; import org.dspace.content.virtual.VirtualMetadataPopulator; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.ConfigurationService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; import org.springframework.beans.factory.annotation.Autowired; public class RelationshipServiceImpl implements RelationshipService { @@ -54,6 +61,10 @@ public class RelationshipServiceImpl implements RelationshipService { @Autowired private RelationshipMetadataService relationshipMetadataService; + + @Autowired + private RelationshipVersioningUtils relationshipVersioningUtils; + @Autowired private VirtualMetadataPopulator virtualMetadataPopulator; @@ -75,9 +86,10 @@ public Relationship create(Context c, Item leftItem, Item rightItem, Relationshi @Override - public Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, - int leftPlace, int rightPlace, String leftwardValue, String rightwardValue) - throws AuthorizeException, SQLException { + public Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus + ) throws AuthorizeException, SQLException { Relationship relationship = new Relationship(); relationship.setLeftItem(leftItem); relationship.setRightItem(rightItem); @@ -86,9 +98,21 @@ public Relationship create(Context c, Item leftItem, Item rightItem, Relationshi relationship.setRightPlace(rightPlace); relationship.setLeftwardValue(leftwardValue); relationship.setRightwardValue(rightwardValue); + 
relationship.setLatestVersionStatus(latestVersionStatus); return create(c, relationship); } + @Override + public Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue + ) throws AuthorizeException, SQLException { + return create( + c, leftItem, rightItem, relationshipType, leftPlace, rightPlace, leftwardValue, rightwardValue, + LatestVersionStatus.BOTH + ); + } + @Override public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException { if (isRelationshipValidToCreate(context, relationship)) { @@ -97,7 +121,7 @@ public Relationship create(Context context, Relationship relationship) throws SQ // This order of execution should be handled in the creation (create, updateplace, update relationship) // for a proper place allocation Relationship relationshipToReturn = relationshipDAO.create(context, relationship); - updatePlaceInRelationship(context, relationshipToReturn); + updatePlaceInRelationship(context, relationshipToReturn, null, null, true, true); update(context, relationshipToReturn); updateItemsInRelationship(context, relationship); return relationshipToReturn; @@ -112,71 +136,388 @@ public Relationship create(Context context, Relationship relationship) throws SQ } @Override - public void updatePlaceInRelationship(Context context, Relationship relationship) - throws SQLException, AuthorizeException { - Item leftItem = relationship.getLeftItem(); - // Max value is used to ensure that these will get added to the back of the list and thus receive the highest - // (last) place as it's set to a -1 for creation - if (relationship.getLeftPlace() == -1) { - relationship.setLeftPlace(Integer.MAX_VALUE); + public Relationship move( + Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace + ) throws SQLException, AuthorizeException { + if 
(authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) || + authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) { + + // Don't do anything if neither the leftPlace nor rightPlace was updated + if (newLeftPlace != null || newRightPlace != null) { + // This order of execution should be handled in the creation (create, updateplace, update relationship) + // for a proper place allocation + updatePlaceInRelationship(context, relationship, newLeftPlace, newRightPlace, false, false); + update(context, relationship); + updateItemsInRelationship(context, relationship); + } + + return relationship; + } else { + throw new AuthorizeException( + "You do not have write rights on this relationship's items"); } + } + + @Override + public Relationship move( + Context context, Relationship relationship, Item newLeftItem, Item newRightItem + ) throws SQLException, AuthorizeException { + // If the new Item is the same as the current Item, don't move + newLeftItem = newLeftItem != relationship.getLeftItem() ? newLeftItem : null; + newRightItem = newRightItem != relationship.getRightItem() ? newRightItem : null; + + // Don't do anything if neither the leftItem nor rightItem was updated + if (newLeftItem != null || newRightItem != null) { + // First move the Relationship to the back within the current Item's lists + // This ensures that we won't have any gaps once we move the Relationship to a different Item + move( + context, relationship, + newLeftItem != null ? -1 : null, + newRightItem != null ? -1 : null + ); + + boolean insertLeft = false; + boolean insertRight = false; + + // If Item has been changed, mark the previous Item as modified to make sure we discard the old relation.* + // metadata on the next update. 
+ // Set the Relationship's Items to the new ones, appending to the end + if (newLeftItem != null) { + relationship.getLeftItem().setMetadataModified(); + relationship.setLeftItem(newLeftItem); + relationship.setLeftPlace(-1); + insertLeft = true; + } + if (newRightItem != null) { + relationship.getRightItem().setMetadataModified(); + relationship.setRightItem(newRightItem); + relationship.setRightPlace(-1); + insertRight = true; + } + + // This order of execution should be handled in the creation (create, updateplace, update relationship) + // for a proper place allocation + updatePlaceInRelationship(context, relationship, null, null, insertLeft, insertRight); + update(context, relationship); + updateItemsInRelationship(context, relationship); + } + return relationship; + } + + /** + * This method will update the place for the Relationship and all other relationships found by the items and + * relationship type of the given Relationship. + * + * @param context The relevant DSpace context + * @param relationship The Relationship object that will have its place updated and that will be used + * to retrieve the other relationships whose place might need to be updated. + * @param newLeftPlace If the Relationship in question is to be moved, the leftPlace it is to be moved to. + * Set this to null if the Relationship has not been moved, i.e. it has just been created, + * deleted or when its Items have been modified. + * @param newRightPlace If the Relationship in question is to be moved, the rightPlace it is to be moved to. + * Set this to null if the Relationship has not been moved, i.e. it has just been created, + * deleted or when its Items have been modified. + * @param insertLeft Whether the Relationship in question should be inserted into the left Item. + * Should be set to true when creating or moving to a different Item. + * @param insertRight Whether the Relationship in question should be inserted into the right Item. 
+ * Should be set to true when creating or moving to a different Item. + * @throws SQLException If something goes wrong + * @throws AuthorizeException + * If the user is not authorized to update the Relationship or its Items + */ + private void updatePlaceInRelationship( + Context context, Relationship relationship, + Integer newLeftPlace, Integer newRightPlace, boolean insertLeft, boolean insertRight + ) throws SQLException, AuthorizeException { + Item leftItem = relationship.getLeftItem(); Item rightItem = relationship.getRightItem(); - if (relationship.getRightPlace() == -1) { - relationship.setRightPlace(Integer.MAX_VALUE); - } - List leftRelationships = findByItemAndRelationshipType(context, - leftItem, - relationship.getRelationshipType(), true); - List rightRelationships = findByItemAndRelationshipType(context, - rightItem, - relationship.getRelationshipType(), - false); - - // These relationships are only deleted from the temporary lists incase they're present in them so that we can + + // These list also include the non-latest. This is relevant to determine whether it's deleted. + // This can also imply there may be overlapping places, and/or the given relationship will overlap + // But the shift will allow this, and only happen when needed based on the latest status + List leftRelationships = findByItemAndRelationshipType( + context, leftItem, relationship.getRelationshipType(), true, -1, -1, false + ); + List rightRelationships = findByItemAndRelationshipType( + context, rightItem, relationship.getRelationshipType(), false, -1, -1, false + ); + + // These relationships are only deleted from the temporary lists in case they're present in them so that we can // properly perform our place calculation later down the line in this method. 
- if (leftRelationships.contains(relationship)) { - leftRelationships.remove(relationship); + boolean deletedFromLeft = !leftRelationships.contains(relationship); + boolean deletedFromRight = !rightRelationships.contains(relationship); + leftRelationships.remove(relationship); + rightRelationships.remove(relationship); + + List leftMetadata = getSiblingMetadata(leftItem, relationship, true); + List rightMetadata = getSiblingMetadata(rightItem, relationship, false); + + // For new relationships added to the end, this will be -1. + // For new relationships added at a specific position, this will contain that position. + // For existing relationships, this will contain the place before it was moved. + // For deleted relationships, this will contain the place before it was deleted. + int oldLeftPlace = relationship.getLeftPlace(); + int oldRightPlace = relationship.getRightPlace(); + + + boolean movedUpLeft = resolveRelationshipPlace( + relationship, true, leftRelationships, leftMetadata, oldLeftPlace, newLeftPlace + ); + boolean movedUpRight = resolveRelationshipPlace( + relationship, false, rightRelationships, rightMetadata, oldRightPlace, newRightPlace + ); + + context.turnOffAuthorisationSystem(); + + //only shift if the place is relevant for the latest relationships + if (relationshipVersioningUtils.otherSideIsLatest(true, relationship.getLatestVersionStatus())) { + shiftSiblings( + relationship, true, oldLeftPlace, movedUpLeft, insertLeft, deletedFromLeft, + leftRelationships, leftMetadata + ); } - if (rightRelationships.contains(relationship)) { - rightRelationships.remove(relationship); + if (relationshipVersioningUtils.otherSideIsLatest(false, relationship.getLatestVersionStatus())) { + shiftSiblings( + relationship, false, oldRightPlace, movedUpRight, insertRight, deletedFromRight, + rightRelationships, rightMetadata + ); } - context.turnOffAuthorisationSystem(); - //If useForPlace for the leftwardType is false for the relationshipType, - // we need to sort 
the relationships here based on leftplace. - if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), true)) { - if (!leftRelationships.isEmpty()) { - leftRelationships.sort(Comparator.comparingInt(Relationship::getLeftPlace)); - for (int i = 0; i < leftRelationships.size(); i++) { - leftRelationships.get(i).setLeftPlace(i); - } - relationship.setLeftPlace(leftRelationships.size()); + + updateItem(context, leftItem); + updateItem(context, rightItem); + + context.restoreAuthSystemState(); + } + + /** + * Return the MDVs in the Item's MDF corresponding to the given Relationship. + * Return an empty list if the Relationship isn't mapped to any MDF + * or if the mapping is configured with useForPlace=false. + * + * This returns actual metadata (not virtual) which in the same metadata field as the useForPlace. + * For a publication with 2 author relationships and 3 plain text dc.contributor.author values, + * it would return the 3 plain text dc.contributor.author values. + * For a person related to publications, it would return an empty list. 
+ */ + private List getSiblingMetadata( + Item item, Relationship relationship, boolean isLeft + ) { + List metadata = new ArrayList<>(); + if (virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), isLeft)) { + HashMap mapping; + if (isLeft) { + mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getLeftwardType()); } else { - relationship.setLeftPlace(0); + mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getRightwardType()); } - } else { - updateItem(context, leftItem); + if (mapping != null) { + for (String mdf : mapping.keySet()) { + metadata.addAll( + // Make sure we're only looking at database MDVs; if the relationship currently overlaps + // one of these, its virtual MDV will overwrite the database MDV in itemService.getMetadata() + // The relationship pass should be sufficient to move any sibling virtual MDVs. + item.getMetadata() + .stream() + .filter(mdv -> mdv.getMetadataField().toString().equals(mdf.replace(".", "_"))) + .collect(Collectors.toList()) + ); + } + } + } + return metadata; + } + /** + * Set the left/right place of a Relationship + * - To a new place in case it's being moved + * - Resolve -1 to the actual last place based on the places of its sibling Relationships and/or MDVs + * and determine if it has been moved up in the list. + * + * Examples: + * - Insert a Relationship at place 3 + * newPlace starts out as null and is not updated. Return movedUp=false + * - Insert a Relationship at place -1 + * newPlace starts out as null and is resolved to e.g. 6. Update the Relationship and return movedUp=false + * - Move a Relationship from place 4 to 2 + * Update the Relationship and return movedUp=false. + * - Move a Relationship from place 2 to -1 + * newPlace starts out as -1 and is resolved to e.g. 5. Update the relationship and return movedUp=true. 
+ * - Remove a relationship from place 1 + * Return movedUp=false + * + * @param relationship the Relationship that's being updated + * @param isLeft whether to consider the left side of the Relationship. + * This method should be called twice, once with isLeft=true and once with isLeft=false. + * Make sure this matches the provided relationships/metadata/oldPlace/newPlace. + * @param relationships the list of sibling Relationships + * @param metadata the list of sibling MDVs + * @param oldPlace the previous place for this Relationship, in case it has been moved. + * Otherwise, the current place of a deleted Relationship + * or the place a Relationship has been inserted. + * @param newPlace The new place for this Relationship. Will be null on insert/delete. + * @return true if the Relationship was moved and newPlace > oldPlace + */ + private boolean resolveRelationshipPlace( + Relationship relationship, boolean isLeft, + List relationships, List metadata, + int oldPlace, Integer newPlace + ) { + boolean movedUp = false; + + if (newPlace != null) { + // We're moving an existing Relationship... + if (newPlace == -1) { + // ...to the end of the list + int nextPlace = getNextPlace(relationships, metadata, isLeft); + if (nextPlace == oldPlace) { + // If this Relationship is already at the end, do nothing. + newPlace = oldPlace; + } else { + // Subtract 1 from the next place since we're moving, not inserting and + // the total number of Relationships stays the same. + newPlace = nextPlace - 1; + } + } + if (newPlace > oldPlace) { + // ...up the list. We have to keep track of this in order to shift correctly later on + movedUp = true; + } + } else if (oldPlace == -1) { + // We're _not_ moving an existing Relationship. The newPlace is already set in the Relationship object. + // We only need to resolve it to the end of the list if it's set to -1, otherwise we can just keep it as is. 
+ newPlace = getNextPlace(relationships, metadata, isLeft); + } + + if (newPlace != null) { + setPlace(relationship, isLeft, newPlace); } - //If useForPlace for the rightwardType is false for the relationshipType, - // we need to sort the relationships here based on the rightplace. - if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), false)) { - if (!rightRelationships.isEmpty()) { - rightRelationships.sort(Comparator.comparingInt(Relationship::getRightPlace)); - for (int i = 0; i < rightRelationships.size(); i++) { - rightRelationships.get(i).setRightPlace(i); + return movedUp; + } + + /** + * Return the index of the next place in a list of Relationships and Metadata. + * By not relying on the size of both lists we can support one-to-many virtual MDV mappings. + * @param isLeft whether to take the left or right place of each Relationship + */ + private int getNextPlace(List relationships, List metadata, boolean isLeft) { + return Stream.concat( + metadata.stream().map(MetadataValue::getPlace), + relationships.stream().map(r -> getPlace(r, isLeft)) + ).max(Integer::compare) + .map(integer -> integer + 1) + .orElse(0); + } + + /** + * Adjust the left/right place of sibling Relationships and MDVs + * + * Examples: with sibling Relationships R,S,T and metadata a,b,c + * - Insert T at place 1 aRbSc -> a T RbSc + * Shift all siblings with place >= 1 one place to the right + * - Delete R from place 2 aT R bSc -> aTbSc + * Shift all siblings with place > 2 one place to the left + * - Move S from place 3 to place 2 (movedUp=false) aTb S c -> aT S bc + * Shift all siblings with 2 < place <= 3 one place to the right + * - Move T from place 1 to place 3 (movedUp=true) a T Sbc -> aSb T c + * Shift all siblings with 1 < place <= 3 one place to the left + * + * @param relationship the Relationship that's being updated + * @param isLeft whether to consider the left side of the Relationship. 
+ * This method should be called twice, once with isLeft=true and once with isLeft=false. + * Make sure this matches the provided relationships/metadata/oldPlace/newPlace. + * @param oldPlace the previous place for this Relationship, in case it has been moved. + * Otherwise, the current place of a deleted Relationship + * or the place a Relationship has been inserted. + * @param movedUp if this Relationship has been moved up the list, e.g. from place 2 to place 4 + * @param deleted whether this Relationship has been deleted + * @param relationships the list of sibling Relationships + * @param metadata the list of sibling MDVs + */ + private void shiftSiblings( + Relationship relationship, boolean isLeft, int oldPlace, boolean movedUp, boolean inserted, boolean deleted, + List relationships, List metadata + ) { + int newPlace = getPlace(relationship, isLeft); + + for (Relationship sibling : relationships) { + // NOTE: If and only if the other side of the relationship has "latest" status, the relationship will appear + // as a metadata value on the item at the current side (indicated by isLeft) of the relationship. + // + // Example: volume <----> issue (LEFT_ONLY) + // => LEFT_ONLY means that the volume has "latest" status, but the issue does NOT have "latest" status + // => the volume will appear in the metadata of the issue, + // but the issue will NOT appear in the metadata of the volume + // + // This means that the other side of the relationship has to have "latest" status, otherwise this + // relationship is NOT relevant for place calculation. 
+ if (relationshipVersioningUtils.otherSideIsLatest(isLeft, sibling.getLatestVersionStatus())) { + int siblingPlace = getPlace(sibling, isLeft); + if ( + (deleted && siblingPlace > newPlace) + // If the relationship was deleted, all relationships after it should shift left + // We must make the distinction between deletes and moves because for inserts oldPlace == newPlace + || (movedUp && siblingPlace <= newPlace && siblingPlace > oldPlace) + // If the relationship was moved up e.g. from place 2 to 5, all relationships + // with place > 2 (the old place) and <= to 5 should shift left + ) { + setPlace(sibling, isLeft, siblingPlace - 1); + } else if ( + (inserted && siblingPlace >= newPlace) + // If the relationship was inserted, all relationships starting from that place should shift right + // We must make the distinction between inserts and moves because for inserts oldPlace == newPlace + || (!movedUp && siblingPlace >= newPlace && siblingPlace < oldPlace) + // If the relationship was moved down e.g. from place 5 to 2, all relationships + // with place >= 2 and < 5 (the old place) should shift right + ) { + setPlace(sibling, isLeft, siblingPlace + 1); } - relationship.setRightPlace(rightRelationships.size()); - } else { - relationship.setRightPlace(0); } + } + for (MetadataValue mdv : metadata) { + // NOTE: Plain text metadata values should ALWAYS be included in the place calculation, + // because they are by definition only visible/relevant to the side of the relationship + // that we are currently processing. 
+ int mdvPlace = mdv.getPlace(); + if ( + (deleted && mdvPlace > newPlace) + // If the relationship was deleted, all metadata after it should shift left + // We must make the distinction between deletes and moves because for inserts oldPlace == newPlace + // If the reltionship was copied to metadata on deletion: + // - the plain text can be after the relationship (in which case it's moved forward again) + // - or before the relationship (in which case it remains in place) + || (movedUp && mdvPlace <= newPlace && mdvPlace > oldPlace) + // If the relationship was moved up e.g. from place 2 to 5, all metadata + // with place > 2 (the old place) and <= to 5 should shift left + ) { + mdv.setPlace(mdvPlace - 1); + } else if ( + (inserted && mdvPlace >= newPlace) + // If the relationship was inserted, all relationships starting from that place should shift right + // We must make the distinction between inserts and moves because for inserts oldPlace == newPlace + || (!movedUp && mdvPlace >= newPlace && mdvPlace < oldPlace) + // If the relationship was moved down e.g. 
from place 5 to 2, all relationships + // with place >= 2 and < 5 (the old place) should shift right + ) { + mdv.setPlace(mdvPlace + 1); + } + } + } + private int getPlace(Relationship relationship, boolean isLeft) { + if (isLeft) { + return relationship.getLeftPlace(); } else { - updateItem(context, rightItem); - + return relationship.getRightPlace(); } - context.restoreAuthSystemState(); + } + private void setPlace(Relationship relationship, boolean isLeft, int place) { + if (isLeft) { + relationship.setLeftPlace(place); + } else { + relationship.setRightPlace(place); + } } @Override @@ -186,16 +527,6 @@ public void updateItem(Context context, Item relatedItem) itemService.update(context, relatedItem); } - @Override - public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException { - return relationshipDAO.findNextLeftPlaceByLeftItem(context, item); - } - - @Override - public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException { - return relationshipDAO.findNextRightPlaceByRightItem(context, item); - } - private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException { RelationshipType relationshipType = relationship.getRelationshipType(); @@ -211,15 +542,19 @@ private boolean isRelationshipValidToCreate(Context context, Relationship relati logRelationshipTypeDetailsForError(relationshipType); return false; } - if (!verifyMaxCardinality(context, relationship.getLeftItem(), + if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.LEFT_ONLY) + && !verifyMaxCardinality(context, relationship.getLeftItem(), relationshipType.getLeftMaxCardinality(), relationshipType, true)) { + //If RIGHT_ONLY => it's a copied relationship, and the count can be ignored log.warn("The relationship has been deemed invalid since the left item has more" + " relationships than the left max cardinality allows after we'd store this relationship"); 
logRelationshipTypeDetailsForError(relationshipType); return false; } - if (!verifyMaxCardinality(context, relationship.getRightItem(), + if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.RIGHT_ONLY) + && !verifyMaxCardinality(context, relationship.getRightItem(), relationshipType.getRightMaxCardinality(), relationshipType, false)) { + //If LEFT_ONLY => it's a copied relationship, and the count can be ignored log.warn("The relationship has been deemed invalid since the right item has more" + " relationships than the right max cardinality allows after we'd store this relationship"); logRelationshipTypeDetailsForError(relationshipType); @@ -250,7 +585,7 @@ private boolean verifyMaxCardinality(Context context, Item itemToProcess, } List rightRelationships = findByItemAndRelationshipType(context, itemToProcess, relationshipType, isLeft); - if (maxCardinality != null && rightRelationships.size() >= maxCardinality) { + if (rightRelationships.size() >= maxCardinality) { return false; } return true; @@ -266,6 +601,7 @@ private boolean verifyEntityTypes(Item itemToProcess, EntityType entityTypeToPro return StringUtils.equals(leftEntityType, entityTypeToProcess.getLabel()); } + @Override public Relationship find(Context context, int id) throws SQLException { Relationship relationship = relationshipDAO.findByID(context, Relationship.class, id); return relationship; @@ -277,14 +613,22 @@ public List findByItem(Context context, Item item) throws SQLExcep } @Override - public List findByItem(Context context, Item item, Integer limit, Integer offset, - boolean excludeTilted) throws SQLException { + public List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted + ) throws SQLException { + return findByItem(context, item, limit, offset, excludeTilted, true); + } - List list = relationshipDAO.findByItem(context, item, limit, offset, excludeTilted); + @Override + public List findByItem( + Context context, Item item, 
Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { + List list = + relationshipDAO.findByItem(context, item, limit, offset, excludeTilted, excludeNonLatest); list.sort((o1, o2) -> { int relationshipType = o1.getRelationshipType().getLeftwardType() - .compareTo(o2.getRelationshipType().getLeftwardType()); + .compareTo(o2.getRelationshipType().getLeftwardType()); if (relationshipType != 0) { return relationshipType; } else { @@ -339,7 +683,7 @@ public void delete(Context context, Relationship relationship) throws SQLExcepti @Override public void delete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem) throws SQLException, AuthorizeException { - log.info(org.dspace.core.LogManager.getHeader(context, "delete_relationship", + log.info(org.dspace.core.LogHelper.getHeader(context, "delete_relationship", "relationship_id=" + relationship.getID() + "&" + "copyMetadataValuesToLeftItem=" + copyToLeftItem + "&" + "copyMetadataValuesToRightItem=" + copyToRightItem)); @@ -356,7 +700,7 @@ public void delete(Context context, Relationship relationship, boolean copyToLef @Override public void forceDelete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem) throws SQLException, AuthorizeException { - log.info(org.dspace.core.LogManager.getHeader(context, "delete_relationship", + log.info(org.dspace.core.LogHelper.getHeader(context, "delete_relationship", "relationship_id=" + relationship.getID() + "&" + "copyMetadataValuesToLeftItem=" + copyToLeftItem + "&" + "copyMetadataValuesToRightItem=" + copyToRightItem)); @@ -375,7 +719,7 @@ private void deleteRelationshipAndCopyToItem(Context context, Relationship relat if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) || authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) { relationshipDAO.delete(context, 
relationship); - updatePlaceInRelationship(context, relationship); + updatePlaceInRelationship(context, relationship, null, null, false, false); updateItemsInRelationship(context, relationship); } else { throw new AuthorizeException( @@ -407,7 +751,7 @@ private void updateItemsInRelationship(Context context, Relationship relationshi // Set a limit on the total depth of relationships to traverse during a relationship change int maxDepth = configurationService.getIntProperty("relationship.update.relateditems.maxdepth", 5); // This is the list containing all items which will have changes to their virtual metadata - List itemsToUpdate = new LinkedList<>(); + List itemsToUpdate = new ArrayList<>(); itemsToUpdate.add(relationship.getLeftItem()); itemsToUpdate.add(relationship.getRightItem()); @@ -448,7 +792,7 @@ private void findModifiedDiscoveryItemsForCurrentItem(Context context, Item item + item.getID() + " due to " + currentDepth + " depth"); return; } - String entityTypeStringFromMetadata = relationshipMetadataService.getEntityTypeStringFromMetadata(item); + String entityTypeStringFromMetadata = itemService.getEntityTypeLabel(item); EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata); // Get all types of relations for the current item List relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType); @@ -508,6 +852,9 @@ private boolean containsVirtualMetadata(String typeToSearchInVirtualMetadata) { /** * Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata. + * The resulting MDVs are added in front or behind the Relationship's virtual MDVs. + * The Relationship's virtual MDVs may be shifted right, and all subsequent metadata will be shifted right. + * So this method ensures the places are still valid. 
* * @param context The relevant DSpace context * @param relationship The relationship containing the left and right items @@ -518,13 +865,20 @@ private void copyMetadataValues(Context context, Relationship relationship, bool boolean copyToRightItem) throws SQLException, AuthorizeException { if (copyToLeftItem) { - String entityTypeString = relationshipMetadataService - .getEntityTypeStringFromMetadata(relationship.getLeftItem()); + String entityTypeString = itemService.getEntityTypeLabel(relationship.getLeftItem()); List relationshipMetadataValues = relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationship.getLeftItem(), entityTypeString, relationship, true); for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { - itemService.addAndShiftRightMetadata(context, relationship.getLeftItem(), + // This adds the plain text metadata values on the same spot as the virtual values. + // This will be overruled in org.dspace.content.DSpaceObjectServiceImpl.update + // in the line below but it's not important whether the plain text or virtual values end up on top. + // The virtual values will eventually be deleted, and the others shifted + // This is required because addAndShiftRightMetadata has issues on metadata fields containing + // relationship values which are not useForPlace, while the relationhip type has useForPlace + // E.g. when using addAndShiftRightMetadata on relation.isAuthorOfPublication, it will break the order + // from dc.contributor.author + itemService.addMetadata(context, relationship.getLeftItem(), relationshipMetadataValue.getMetadataField(). 
getMetadataSchema().getName(), relationshipMetadataValue.getMetadataField().getElement(), @@ -533,16 +887,16 @@ private void copyMetadataValues(Context context, Relationship relationship, bool relationshipMetadataValue.getValue(), null, -1, relationshipMetadataValue.getPlace()); } + //This will ensure the new values no longer overlap, but won't break the order itemService.update(context, relationship.getLeftItem()); } if (copyToRightItem) { - String entityTypeString = relationshipMetadataService - .getEntityTypeStringFromMetadata(relationship.getRightItem()); + String entityTypeString = itemService.getEntityTypeLabel(relationship.getRightItem()); List relationshipMetadataValues = relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationship.getRightItem(), entityTypeString, relationship, true); for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { - itemService.addAndShiftRightMetadata(context, relationship.getRightItem(), + itemService.addMetadata(context, relationship.getRightItem(), relationshipMetadataValue.getMetadataField(). 
getMetadataSchema().getName(), relationshipMetadataValue.getMetadataField().getElement(), @@ -636,22 +990,46 @@ public List findByItemAndRelationshipType(Context context, Item it public List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType) throws SQLException { - return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, -1, -1); + return findByItemAndRelationshipType(context, item, relationshipType, -1, -1, true); } @Override public List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, int limit, int offset) throws SQLException { - return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, limit, offset); + return findByItemAndRelationshipType(context, item, relationshipType, limit, offset, true); } @Override - public List findByItemAndRelationshipType(Context context, Item item, - RelationshipType relationshipType, boolean isLeft, - int limit, int offset) - throws SQLException { - return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset); + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO + .findByItemAndRelationshipType(context, item, relationshipType, limit, offset, excludeNonLatest); + } + + @Override + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset + ) throws SQLException { + return findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, true); + } + + @Override + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset, + boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO + 
.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, excludeNonLatest); + } + + @Override + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException { + return relationshipDAO + .findByLatestItemAndRelationshipType(context, latestItem, relationshipType, isLeft); } @Override @@ -688,7 +1066,14 @@ public int countTotal(Context context) throws SQLException { @Override public int countByItem(Context context, Item item) throws SQLException { - return relationshipDAO.countByItem(context, item); + return countByItem(context, item, false, true); + } + + @Override + public int countByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO.countByItem(context, item, excludeTilted, excludeNonLatest); } @Override @@ -697,9 +1082,18 @@ public int countByRelationshipType(Context context, RelationshipType relationshi } @Override - public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - boolean isLeft) throws SQLException { - return relationshipDAO.countByItemAndRelationshipType(context, item, relationshipType, isLeft); + public int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft + ) throws SQLException { + return countByItemAndRelationshipType(context, item, relationshipType, isLeft, true); + } + + @Override + public int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException { + return relationshipDAO + .countByItemAndRelationshipType(context, item, relationshipType, isLeft, excludeNonLatest); } @Override @@ -707,4 +1101,19 @@ public int countByTypeName(Context context, String typeName) throws SQLException { return 
relationshipDAO.countByTypeName(context, typeName); } + + @Override + public List findByItemRelationshipTypeAndRelatedList(Context context, UUID focusUUID, + RelationshipType relationshipType, List items, boolean isLeft, + int offset, int limit) throws SQLException { + return relationshipDAO + .findByItemAndRelationshipTypeAndList(context, focusUUID, relationshipType, items, isLeft, offset,limit); + } + + @Override + public int countByItemRelationshipTypeAndRelatedList(Context context, UUID focusUUID, + RelationshipType relationshipType, List items, boolean isLeft) throws SQLException { + return relationshipDAO + .countByItemAndRelationshipTypeAndList(context, focusUUID, relationshipType, items, isLeft); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipType.java b/dspace-api/src/main/java/org/dspace/content/RelationshipType.java index 9163f46a0ea0..5e6941052b83 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipType.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipType.java @@ -208,7 +208,7 @@ public Integer getLeftMinCardinality() { /** * Standard setter for the leftMinCardinality Integer for this RelationshipType - * @param leftMinCardinality The leftMinCardinality Integer that this RelationshipType should recieve + * @param leftMinCardinality The leftMinCardinality Integer that this RelationshipType should receive */ public void setLeftMinCardinality(Integer leftMinCardinality) { this.leftMinCardinality = leftMinCardinality; @@ -224,7 +224,7 @@ public Integer getLeftMaxCardinality() { /** * Standard setter for the leftMaxCardinality Integer for this RelationshipType - * @param leftMaxCardinality The leftMaxCardinality Integer that this RelationshipType should recieve + * @param leftMaxCardinality The leftMaxCardinality Integer that this RelationshipType should receive */ public void setLeftMaxCardinality(Integer leftMaxCardinality) { this.leftMaxCardinality = leftMaxCardinality; @@ -240,7 +240,7 @@ 
public Integer getRightMinCardinality() { /** * Standard setter for the rightMinCardinality Integer for this RelationshipType - * @param rightMinCardinality The rightMinCardinality Integer that this RelationshipType should recieve + * @param rightMinCardinality The rightMinCardinality Integer that this RelationshipType should receive */ public void setRightMinCardinality(Integer rightMinCardinality) { this.rightMinCardinality = rightMinCardinality; @@ -256,7 +256,7 @@ public Integer getRightMaxCardinality() { /** * Standard setter for the rightMaxCardinality Integer for this RelationshipType - * @param rightMaxCardinality The rightMaxCardinality Integer that this RelationshipType should recieve + * @param rightMaxCardinality The rightMaxCardinality Integer that this RelationshipType should receive */ public void setRightMaxCardinality(Integer rightMaxCardinality) { this.rightMaxCardinality = rightMaxCardinality; @@ -318,6 +318,7 @@ public enum Tilted { * Standard getter for the ID of this RelationshipType * @return The ID of this RelationshipType */ + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipTypeServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/RelationshipTypeServiceImpl.java index 29472436bdd7..9e5de89ae28e 100644 --- a/dspace-api/src/main/java/org/dspace/content/RelationshipTypeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/RelationshipTypeServiceImpl.java @@ -174,4 +174,9 @@ public void delete(Context context,RelationshipType relationshipType) throws SQL } relationshipTypeDAO.delete(context, relationshipType); } + + @Override + public int countByEntityType(Context context, EntityType entityType) throws SQLException { + return relationshipTypeDAO.countByEntityType(context, entityType); + } } diff --git a/dspace-api/src/main/java/org/dspace/content/SupervisedItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/SupervisedItemServiceImpl.java 
deleted file mode 100644 index b0eb77ec2aa8..000000000000 --- a/dspace-api/src/main/java/org/dspace/content/SupervisedItemServiceImpl.java +++ /dev/null @@ -1,40 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.content; - -import java.sql.SQLException; -import java.util.List; - -import org.dspace.content.service.SupervisedItemService; -import org.dspace.content.service.WorkspaceItemService; -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; -import org.springframework.beans.factory.annotation.Autowired; - -public class SupervisedItemServiceImpl implements SupervisedItemService { - - @Autowired(required = true) - protected WorkspaceItemService workspaceItemService; - - protected SupervisedItemServiceImpl() { - - } - - @Override - public List getAll(Context context) - throws SQLException { - return workspaceItemService.findAllSupervisedItems(context); - } - - @Override - public List findbyEPerson(Context context, EPerson ep) - throws SQLException { - return workspaceItemService.findSupervisedItemsByEPerson(context, ep); - } - -} diff --git a/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java b/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java index 8049aa976caf..a4c880173bf7 100644 --- a/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java +++ b/dspace-api/src/main/java/org/dspace/content/WorkspaceItem.java @@ -8,8 +8,6 @@ package org.dspace.content; import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; @@ -17,8 +15,6 @@ import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import 
javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.OneToOne; import javax.persistence.SequenceGenerator; @@ -27,7 +23,6 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; import org.dspace.core.Context; import org.dspace.eperson.EPerson; -import org.dspace.eperson.Group; import org.dspace.workflow.WorkflowItem; import org.hibernate.proxy.HibernateProxyHelper; @@ -78,14 +73,6 @@ public class WorkspaceItem @Column(name = "page_reached") private Integer pageReached = -1; - @ManyToMany(fetch = FetchType.LAZY) - @JoinTable( - name = "epersongroup2workspaceitem", - joinColumns = {@JoinColumn(name = "workspace_item_id")}, - inverseJoinColumns = {@JoinColumn(name = "eperson_group_id")} - ) - private final List supervisorGroups = new ArrayList<>(); - /** * Protected constructor, create object using: * {@link org.dspace.content.service.WorkspaceItemService#create(Context, Collection, boolean)} @@ -226,15 +213,4 @@ public void setPublishedBefore(boolean b) { publishedBefore = b; } - public List getSupervisorGroups() { - return supervisorGroups; - } - - void removeSupervisorGroup(Group group) { - supervisorGroups.remove(group); - } - - void addSupervisorGroup(Group group) { - supervisorGroups.add(group); - } } diff --git a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java index 8fc302f8bf84..b6e7372af184 100644 --- a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java @@ -12,7 +12,11 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.UUID; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.Util; @@ -20,14 +24,23 @@ import 
org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.dao.WorkspaceItemDAO; +import org.dspace.content.logic.Filter; +import org.dspace.content.logic.FilterUtils; import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.event.Event; +import org.dspace.identifier.DOI; +import org.dspace.identifier.DOIIdentifierProvider; +import org.dspace.identifier.Identifier; +import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.factory.IdentifierServiceFactory; +import org.dspace.identifier.service.DOIService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowService; import org.springframework.beans.factory.annotation.Autowired; @@ -54,6 +67,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService { protected ItemService itemService; @Autowired(required = true) protected WorkflowService workflowService; + @Autowired(required = true) + protected DOIService doiService; protected WorkspaceItemServiceImpl() { @@ -66,12 +81,12 @@ public WorkspaceItem find(Context context, int id) throws SQLException { if (workspaceItem == null) { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_workspace_item", + log.debug(LogHelper.getHeader(context, "find_workspace_item", "not_found,workspace_item_id=" + id)); } } else { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_workspace_item", + log.debug(LogHelper.getHeader(context, "find_workspace_item", "workspace_item_id=" + id)); } } @@ -80,6 +95,12 @@ public WorkspaceItem find(Context context, int id) throws SQLException { 
@Override public WorkspaceItem create(Context context, Collection collection, boolean template) + throws AuthorizeException, SQLException { + return create(context, collection, null, template); + } + + @Override + public WorkspaceItem create(Context context, Collection collection, UUID uuid, boolean template) throws AuthorizeException, SQLException { // Check the user has permission to ADD to the collection authorizeService.authorizeAction(context, collection, Constants.ADD); @@ -89,7 +110,12 @@ public WorkspaceItem create(Context context, Collection collection, boolean temp // Create an item - Item item = itemService.create(context, workspaceItem); + Item item; + if (uuid != null) { + item = itemService.create(context, workspaceItem, uuid); + } else { + item = itemService.create(context, workspaceItem); + } item.setSubmitter(context.getCurrentUser()); // Now create the policies for the submitter to modify item and contents @@ -107,10 +133,31 @@ public WorkspaceItem create(Context context, Collection collection, boolean temp authorizeService .addPolicy(context, item, Constants.DELETE, item.getSubmitter(), ResourcePolicy.TYPE_SUBMISSION); - // Copy template if appropriate Item templateItem = collection.getTemplateItem(); + Optional colEntityType = getDSpaceEntityType(collection); + Optional templateItemEntityType = getDSpaceEntityType(templateItem); + + if (template && colEntityType.isPresent() && templateItemEntityType.isPresent() && + !StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) { + throw new IllegalStateException("The template item has entity type : (" + + templateItemEntityType.get().getValue() + ") different than collection entity type : " + + colEntityType.get().getValue()); + } + + if (template && colEntityType.isPresent() && templateItemEntityType.isEmpty()) { + MetadataValue original = colEntityType.get(); + MetadataField metadataField = original.getMetadataField(); + MetadataSchema metadataSchema = 
metadataField.getMetadataSchema(); + // NOTE: dspace.entity.type = does not make sense + // the collection entity type is by default blank when a collection is first created + if (StringUtils.isNotBlank(original.getValue())) { + itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(), + metadataField.getQualifier(), original.getLanguage(), original.getValue()); + } + } + if (template && (templateItem != null)) { List md = itemService.getMetadata(templateItem, Item.ANY, Item.ANY, Item.ANY, Item.ANY); @@ -124,9 +171,29 @@ public WorkspaceItem create(Context context, Collection collection, boolean temp } itemService.update(context, item); + + // If configured, register identifiers (eg handle, DOI) now. This is typically used with the Show Identifiers + // submission step which previews minted handles and DOIs during the submission process. Default: false + if (DSpaceServicesFactory.getInstance().getConfigurationService() + .getBooleanProperty("identifiers.submission.register", false)) { + try { + // Get map of filters to use for identifier types, while the item is in progress + Map, Filter> filters = FilterUtils.getIdentifierFilters(true); + IdentifierServiceFactory.getInstance().getIdentifierService().register(context, item, filters); + // Look for a DOI and move it to PENDING + DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setStatus(DOIIdentifierProvider.PENDING); + doiService.update(context, doi); + } + } catch (IdentifierException e) { + log.error("Could not register identifier(s) for item {}: {}", item.getID(), e.getMessage()); + } + } + workspaceItem.setItem(item); - log.info(LogManager.getHeader(context, "create_workspace_item", + log.info(LogHelper.getHeader(context, "create_workspace_item", "workspace_item_id=" + workspaceItem.getID() + "item_id=" + item.getID() + "collection_id=" + collection.getID())); @@ -137,6 +204,15 @@ public WorkspaceItem create(Context context, Collection 
collection, boolean temp return workspaceItem; } + private Optional getDSpaceEntityType(DSpaceObject dSpaceObject) { + return Objects.nonNull(dSpaceObject) ? dSpaceObject.getMetadata() + .stream() + .filter(x -> x.getMetadataField().toString('.') + .equalsIgnoreCase("dspace.entity.type")) + .findFirst() + : Optional.empty(); + } + @Override public WorkspaceItem create(Context c, WorkflowItem workflowItem) throws SQLException, AuthorizeException { WorkspaceItem workspaceItem = workspaceItemDAO.create(c, new WorkspaceItem()); @@ -167,16 +243,6 @@ public WorkspaceItem findByItem(Context context, Item item) throws SQLException return workspaceItemDAO.findByItem(context, item); } - @Override - public List findAllSupervisedItems(Context context) throws SQLException { - return workspaceItemDAO.findWithSupervisedGroup(context); - } - - @Override - public List findSupervisedItemsByEPerson(Context context, EPerson ePerson) throws SQLException { - return workspaceItemDAO.findBySupervisedGroupMember(context, ePerson); - } - @Override public List findAll(Context context) throws SQLException { return workspaceItemDAO.findAll(context); @@ -191,7 +257,7 @@ public List findAll(Context context, Integer limit, Integer offse public void update(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException { // Authorisation is checked by the item.update() method below - log.info(LogManager.getHeader(context, "update_workspace_item", + log.info(LogHelper.getHeader(context, "update_workspace_item", "workspace_item_id=" + workspaceItem.getID())); // Update the item @@ -219,14 +285,10 @@ public void deleteAll(Context context, WorkspaceItem workspaceItem) + "original submitter to delete a workspace item"); } - log.info(LogManager.getHeader(context, "delete_workspace_item", + log.info(LogHelper.getHeader(context, "delete_workspace_item", "workspace_item_id=" + workspaceItem.getID() + "item_id=" + item.getID() + "collection_id=" + 
workspaceItem.getCollection().getID())); - // Need to delete the epersongroup2workspaceitem row first since it refers - // to workspaceitem ID - workspaceItem.getSupervisorGroups().clear(); - // Need to delete the workspaceitem row first since it refers // to item ID workspaceItemDAO.delete(context, workspaceItem); @@ -256,20 +318,12 @@ public void deleteWrapper(Context context, WorkspaceItem workspaceItem) throws S Item item = workspaceItem.getItem(); authorizeService.authorizeAction(context, item, Constants.WRITE); - log.info(LogManager.getHeader(context, "delete_workspace_item", + log.info(LogHelper.getHeader(context, "delete_workspace_item", "workspace_item_id=" + workspaceItem.getID() + "item_id=" + item.getID() + "collection_id=" + workspaceItem.getCollection().getID())); // deleteSubmitPermissions(); - // Need to delete the workspaceitem row first since it refers - // to item ID - try { - workspaceItem.getSupervisorGroups().clear(); - } catch (Exception e) { - log.error("failed to clear supervisor group", e); - } - workspaceItemDAO.delete(context, workspaceItem); } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index f25e2c4646b2..34ba9e8c4550 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -15,7 +15,9 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.stream.Collectors; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.app.util.DCInput; @@ -23,14 +25,17 @@ import org.dspace.app.util.DCInputsReader; import org.dspace.app.util.DCInputsReaderException; import org.dspace.app.util.SubmissionConfig; -import 
org.dspace.app.util.SubmissionConfigReader; import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.content.Collection; import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -80,13 +85,18 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader - private SubmissionConfigReader itemSubmissionConfigReader; + private SubmissionConfigService submissionConfigService; @Autowired(required = true) protected ConfigurationService configurationService; @Autowired(required = true) protected PluginService pluginService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -126,7 +136,7 @@ public Set getChoiceAuthoritiesNames() { private synchronized void init() { if (!initialized) { try { - itemSubmissionConfigReader = new SubmissionConfigReader(); + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); } catch (SubmissionConfigReaderException e) { // the system is in an illegal state as the submission definition is not valid throw new IllegalStateException("Error reading the item submission configuration: 
" + e.getMessage(), @@ -231,7 +241,7 @@ public String getChoiceAuthorityName(String schema, String element, String quali // there is an authority configured for the metadata valid for some collections, // check if it is the requested collection Map controllerFormDef = controllerFormDefinitions.get(fieldKey); - SubmissionConfig submissionConfig = itemSubmissionConfigReader + SubmissionConfig submissionConfig = submissionConfigService .getSubmissionConfigByCollection(collection.getHandle()); String submissionName = submissionConfig.getSubmissionName(); // check if the requested collection has a submission definition that use an authority for the metadata @@ -253,14 +263,14 @@ protected String makeFieldKey(String schema, String element, String qualifier) { } @Override - public void clearCache() { + public void clearCache() throws SubmissionConfigReaderException { controller.clear(); authorities.clear(); presentation.clear(); closed.clear(); controllerFormDefinitions.clear(); authoritiesFormDefinitions.clear(); - itemSubmissionConfigReader = null; + submissionConfigService.reload(); initialized = false; } @@ -310,7 +320,7 @@ private void loadChoiceAuthorityConfigurations() { */ private void autoRegisterChoiceAuthorityFromInputReader() { try { - List submissionConfigs = itemSubmissionConfigReader + List submissionConfigs = submissionConfigService .getAllSubmissionConfigs(Integer.MAX_VALUE, 0); DCInputsReader dcInputsReader = new DCInputsReader(); @@ -481,10 +491,11 @@ private ChoiceAuthority getAuthorityByFieldKeyCollection(String fieldKey, Collec init(); ChoiceAuthority ma = controller.get(fieldKey); if (ma == null && collection != null) { - SubmissionConfigReader configReader; + SubmissionConfigService configReaderService; try { - configReader = new SubmissionConfigReader(); - SubmissionConfig submissionName = configReader.getSubmissionConfigByCollection(collection.getHandle()); + configReaderService = 
SubmissionServiceFactory.getInstance().getSubmissionConfigService(); + SubmissionConfig submissionName = configReaderService + .getSubmissionConfigByCollection(collection.getHandle()); ma = controllerFormDefinitions.get(fieldKey).get(submissionName.getSubmissionName()); } catch (SubmissionConfigReaderException e) { // the system is in an illegal state as the submission definition is not valid @@ -540,4 +551,65 @@ public Choice getParentChoice(String authorityName, String vocabularyId, String HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); return ma.getParentChoice(authorityName, vocabularyId, locale); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { + return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + + // First, check if this vocabulary index is disabled + String[] vocabulariesDisabled = configurationService + .getArrayProperty("webui.browse.vocabularies.disabled"); + if (vocabulariesDisabled != null && ArrayUtils.contains(vocabulariesDisabled, nameVocab)) { + // Discard this vocabulary browse index + return null; + } + + Set metadataFields = new HashSet<>(); + Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: 
facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + + // If there is no matching facet, return null to ignore this vocabulary index + if (matchingFacet == null) { + return null; + } + + DSpaceControlledVocabularyIndex vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java index b1d8cf36a5d3..902bded33ef7 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DCInputAuthority.java @@ -156,7 +156,8 @@ public Choices getMatches(String query, int start, int limit, String locale) { int found = 0; List v = new ArrayList(); for (int i = 0; i < valuesLocale.length; ++i) { - if (query == null || StringUtils.containsIgnoreCase(valuesLocale[i], query)) { + // In a DCInputAuthority context, a user will want to query the labels, not the values + if (query == null || StringUtils.containsIgnoreCase(labelsLocale[i], query)) { if (found >= start && v.size() < limit) { v.add(new Choice(null, valuesLocale[i], labelsLocale[i])); if (valuesLocale[i].equalsIgnoreCase(query)) { diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java index adce37865d79..16632ee5466b 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java +++ 
b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabulary.java @@ -30,7 +30,7 @@ import org.xml.sax.InputSource; /** - * ChoiceAuthority source that reads the JSPUI-style hierarchical vocabularies + * ChoiceAuthority source that reads the hierarchical vocabularies * from {@code ${dspace.dir}/config/controlled-vocabularies/*.xml} and turns * them into autocompleting authorities. * @@ -136,7 +136,9 @@ protected void init() { } protected String buildString(Node node) { - if (node.getNodeType() == Node.DOCUMENT_NODE) { + if (node.getNodeType() == Node.DOCUMENT_NODE || ( + node.getParentNode() != null && + node.getParentNode().getNodeType() == Node.DOCUMENT_NODE)) { return (""); } else { String parentValue = buildString(node.getParentNode()); diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 000000000000..bf8194dbd53b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.browse.BrowseIndex; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex extends BrowseIndex { + + protected DSpaceControlledVocabulary vocabulary; + protected Set 
metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, + DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + } + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java new file mode 100644 index 000000000000..8d929a8d3bdf --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java @@ -0,0 +1,127 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.UUID; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.util.UUIDUtils; +import org.dspace.web.ContextUtil; + +/** + * Implementation of {@link ChoiceAuthority} based on EPerson. Allows you to set + * the id of an eperson as authority. 
+ * + * @author Mykhaylo Boychuk (4science.it) + */ +public class EPersonAuthority implements ChoiceAuthority { + + private static final Logger log = LogManager.getLogger(EPersonAuthority.class); + + /** + * the name assigned to the specific instance by the PluginService, @see + * {@link NameAwarePlugin} + **/ + private String authorityName; + + private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + + private AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + @Override + public Choices getBestMatch(String text, String locale) { + return getMatches(text, 0, 2, locale); + } + + @Override + public Choices getMatches(String text, int start, int limit, String locale) { + if (limit <= 0) { + limit = 20; + } + + Context context = getContext(); + + List ePersons = searchEPersons(context, text, start, limit); + + List choiceList = new ArrayList(); + for (EPerson eperson : ePersons) { + choiceList.add(new Choice(eperson.getID().toString(), eperson.getFullName(), eperson.getFullName())); + } + Choice[] results = new Choice[choiceList.size()]; + results = choiceList.toArray(results); + return new Choices(results, start, ePersons.size(), Choices.CF_AMBIGUOUS, ePersons.size() > (start + limit), 0); + } + + @Override + public String getLabel(String key, String locale) { + + UUID uuid = UUIDUtils.fromString(key); + if (uuid == null) { + return null; + } + + Context context = getContext(); + try { + EPerson ePerson = ePersonService.find(context, uuid); + return ePerson != null ? 
ePerson.getFullName() : null; + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + } + + private List searchEPersons(Context context, String text, int start, int limit) { + + if (!isCurrentUserAdminOrAccessGroupManager(context)) { + return Collections.emptyList(); + } + + try { + return ePersonService.search(context, text, start, limit); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + } + + private Context getContext() { + Context context = ContextUtil.obtainCurrentRequestContext(); + return context != null ? context : new Context(); + } + + private boolean isCurrentUserAdminOrAccessGroupManager(Context context) { + try { + return authorizeService.isAdmin(context) || authorizeService.isAccountManager(context); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getPluginInstanceName() { + return authorityName; + } + + @Override + public void setPluginInstanceName(String name) { + this.authorityName = name; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 497fa08f2faf..123626cd0965 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -200,8 +200,8 @@ protected void addExternalResults(String text, ArrayList choices, List disseminateList(Context context, DSpaceObject dso) @@ -352,37 +354,29 @@ private List disseminateListInternal(DSpaceObject dso, boolean addSchem if (trip == null) { log.warn("WARNING: " + getPluginInstanceName() + ": No MODS mapping for \"" + qdc + "\""); } else { - try { - Element me = (Element) trip.xml.clone(); - if (addSchema) { - me.setAttribute("schemaLocation", schemaLocation, XSI_NS); - } - Iterator 
ni = trip.xpath.selectNodes(me).iterator(); - if (!ni.hasNext()) { - log.warn("XPath \"" + trip.xpath.getXPath() + - "\" found no elements in \"" + - outputUgly.outputString(me) + - "\", qdc=" + qdc); - } - while (ni.hasNext()) { - Object what = ni.next(); - if (what instanceof Element) { - ((Element) what).setText(checkedString(value)); - } else if (what instanceof Attribute) { - ((Attribute) what).setValue(checkedString(value)); - } else if (what instanceof Text) { - ((Text) what).setText(checkedString(value)); - } else { - log.warn("Got unknown object from XPath, class=" + what.getClass().getName()); - } + Element me = (Element) trip.xml.clone(); + if (addSchema) { + me.setAttribute("schemaLocation", schemaLocation, XSI_NS); + } + List matches = trip.xpath.evaluate(me); + if (matches.isEmpty()) { + log.warn("XPath \"" + trip.xpath.getExpression() + + "\" found no elements in \"" + + outputUgly.outputString(me) + + "\", qdc=" + qdc); + } + for (Object match: matches) { + if (match instanceof Element) { + ((Element) match).setText(checkedString(value)); + } else if (match instanceof Attribute) { + ((Attribute) match).setValue(checkedString(value)); + } else if (match instanceof Text) { + ((Text) match).setText(checkedString(value)); + } else { + log.warn("Got unknown object from XPath, class=" + match.getClass().getName()); } - result.add(me); - } catch (JDOMException je) { - log.error("Error following XPath in modsTriple: context=" + - outputUgly.outputString(trip.xml) + - ", xpath=" + trip.xpath.getXPath() + ", exception=" + - je.toString()); } + result.add(me); } } return result; @@ -423,9 +417,7 @@ protected List site2Metadata(Site site) { String title = site.getName(); String url = site.getURL(); - if (identifier_uri != null) { - metadata.add(createDCValue("identifier.uri", null, identifier_uri)); - } + metadata.add(createDCValue("identifier.uri", null, identifier_uri)); //FIXME: adding two URIs for now (site handle and URL), in case site isn't using handles 
if (url != null) { @@ -472,9 +464,7 @@ protected List community2Metadata(Community community) { metadata.add(createDCValue("description", "tableofcontents", description_table)); } - if (identifier_uri != null) { - metadata.add(createDCValue("identifier.uri", null, identifier_uri)); - } + metadata.add(createDCValue("identifier.uri", null, identifier_uri)); if (rights != null) { metadata.add(createDCValue("rights", null, rights)); @@ -526,9 +516,7 @@ protected List collection2Metadata(Collection collection) { metadata.add(createDCValue("description", "tableofcontents", description_table)); } - if (identifier_uri != null) { - metadata.add(createDCValue("identifier", "uri", identifier_uri)); - } + metadata.add(createDCValue("identifier", "uri", identifier_uri)); if (provenance != null) { metadata.add(createDCValue("provenance", null, provenance)); diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java index 994e15601dff..562dadaca0bb 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/NullIngestionCrosswalk.java @@ -15,9 +15,9 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; /** * "Null" ingestion crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java index 10bd5ce6fa31..6b0ecae780ce 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java +++ 
b/dspace-api/src/main/java/org/dspace/content/crosswalk/OAIDCIngestionCrosswalk.java @@ -20,8 +20,8 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * DIM ingestion crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java index 3dde093784de..ac1c434322a6 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREDisseminationCrosswalk.java @@ -31,8 +31,8 @@ import org.dspace.core.Utils; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * ORE dissemination crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java index 6d7c7161895c..f756aae22577 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/OREIngestionCrosswalk.java @@ -34,27 +34,27 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.xpath.XPath; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** * ORE 
ingestion crosswalk *

    - * Processes an Atom-encoded ORE resource map and attemps to interpret it as a DSpace item + * Processes an Atom-encoded ORE resource map and attempts to interpret it as a DSpace item. * * @author Alexey Maslov - * @version $Revision: 1 $ */ public class OREIngestionCrosswalk implements IngestionCrosswalk { /** * log4j category */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OREDisseminationCrosswalk.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); /* Namespaces */ public static final Namespace ATOM_NS = @@ -114,23 +114,21 @@ public void ingest(Context context, DSpaceObject dso, Element root, boolean crea Document doc = new Document(); doc.addContent(root.detach()); - XPath xpathLinks; List aggregatedResources; String entryId; - try { - xpathLinks = XPath.newInstance("/atom:entry/atom:link[@rel=\"" + ORE_NS.getURI() + "aggregates" + "\"]"); - xpathLinks.addNamespace(ATOM_NS); - aggregatedResources = xpathLinks.selectNodes(doc); - - xpathLinks = XPath.newInstance("/atom:entry/atom:link[@rel='alternate']/@href"); - xpathLinks.addNamespace(ATOM_NS); - entryId = ((Attribute) xpathLinks.selectSingleNode(doc)).getValue(); - } catch (JDOMException e) { - throw new CrosswalkException("JDOM exception occurred while ingesting the ORE", e); - } + XPathExpression xpathLinks = + XPathFactory.instance() + .compile("/atom:entry/atom:link[@rel=\"" + ORE_NS.getURI() + "aggregates" + "\"]", + Filters.element(), null, ATOM_NS); + aggregatedResources = xpathLinks.evaluate(doc); + + XPathExpression xpathAltHref = + XPathFactory.instance() + .compile("/atom:entry/atom:link[@rel='alternate']/@href", + Filters.attribute(), null, ATOM_NS); + entryId = xpathAltHref.evaluateFirst(doc).getValue(); // Next for each resource, create a bitstream - XPath xpathDesc; NumberFormat nf = NumberFormat.getInstance(); nf.setGroupingUsed(false); nf.setMinimumIntegerDigits(4); @@ -141,16 +139,12 @@ public void ingest(Context 
context, DSpaceObject dso, Element root, boolean crea String bundleName; Element desc = null; - try { - xpathDesc = XPath.newInstance( - "/atom:entry/oreatom:triples/rdf:Description[@rdf:about=\"" + this.encodeForURL(href) + "\"][1]"); - xpathDesc.addNamespace(ATOM_NS); - xpathDesc.addNamespace(ORE_ATOM); - xpathDesc.addNamespace(RDF_NS); - desc = (Element) xpathDesc.selectSingleNode(doc); - } catch (JDOMException e) { - e.printStackTrace(); - } + XPathExpression xpathDesc = + XPathFactory.instance() + .compile("/atom:entry/oreatom:triples/rdf:Description[@rdf:about=\"" + + this.encodeForURL(href) + "\"][1]", + Filters.element(), null, ATOM_NS, ORE_ATOM, RDF_NS); + desc = xpathDesc.evaluateFirst(doc); if (desc != null && desc.getChild("type", RDF_NS).getAttributeValue("resource", RDF_NS) .equals(DS_NS.getURI() + "DSpaceBitstream")) { diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java index e4e387a3ec31..39b6c8f29c80 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/PREMISCrosswalk.java @@ -30,8 +30,8 @@ import org.dspace.core.Context; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * PREMIS Crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java index 312aed35434b..5d9322339d0e 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/ParameterizedDisseminationCrosswalk.java @@ -14,7 +14,7 @@ import 
org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; -import org.jdom.Element; +import org.jdom2.Element; /** * Translate DSpace native metadata into an external XML format, with parameters. diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java index f648a87a0f45..2fdbaaad003e 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/QDCCrosswalk.java @@ -36,10 +36,10 @@ import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.input.SAXBuilder; /** * Configurable QDC Crosswalk @@ -129,7 +129,7 @@ public class QDCCrosswalk extends SelfNamedPlugin protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected static final ConfigurationService configurationService + protected final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); private final CrosswalkMetadataValidator metadataValidator = new CrosswalkMetadataValidator(); @@ -141,9 +141,17 @@ public class QDCCrosswalk extends SelfNamedPlugin private static String aliases[] = null; static { + initStatic(); + } + + /** + * Call this method again in tests to repeat initialization if necessary. 
+ */ + public static void initStatic() { List aliasList = new ArrayList<>(); String propname = CONFIG_PREFIX + ".properties."; - List configKeys = configurationService.getPropertyKeys(propname); + List configKeys = + DSpaceServicesFactory.getInstance().getConfigurationService().getPropertyKeys(propname); for (String key : configKeys) { aliasList.add(key.substring(propname.length())); } @@ -282,7 +290,7 @@ private void init() qdc2element.put(qdc, element); element2qdc.put(makeQualifiedTagName(element), qdc); log.debug("Building Maps: qdc=\"" + qdc + "\", element=\"" + element.toString() + "\""); - } catch (org.jdom.JDOMException je) { + } catch (org.jdom2.JDOMException je) { throw new CrosswalkInternalException( "Failed parsing XML fragment in properties file: \"" + prolog + val + postlog + "\": " + je .toString(), je); @@ -318,6 +326,7 @@ public String getSchemaLocation() { * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java index d36ff3edf5af..2c763036ce33 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/RoleCrosswalk.java @@ -26,12 +26,12 @@ import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowException; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.input.SAXBuilder; +import 
org.jdom2.output.XMLOutputter; /** * Role Crosswalk diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java index 22ec68070aed..2f91c3aa0712 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/SimpleDCDisseminationCrosswalk.java @@ -24,8 +24,8 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.SelfNamedPlugin; -import org.jdom.Element; -import org.jdom.Namespace; +import org.jdom2.Element; +import org.jdom2.Namespace; /** * Disseminator for Simple Dublin Core metadata in XML format. @@ -84,6 +84,7 @@ public Element disseminateElement(Context context, DSpaceObject dso) * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/SubscriptionDsoMetadataForEmailCompose.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/SubscriptionDsoMetadataForEmailCompose.java new file mode 100644 index 000000000000..05fda2b97475 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/SubscriptionDsoMetadataForEmailCompose.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.crosswalk; + +import static org.dspace.content.Item.ANY; + +import java.io.OutputStream; +import java.io.PrintStream; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import 
org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Creates a String to be sent as email body for subscriptions + * + * @author Alba Aliu + */ +public class SubscriptionDsoMetadataForEmailCompose implements StreamDisseminationCrosswalk { + + private List metadata = new ArrayList<>(); + + @Autowired + private ItemService itemService; + + @Override + public boolean canDisseminate(Context context, DSpaceObject dso) { + return Objects.nonNull(dso) && dso.getType() == Constants.ITEM; + } + + @Override + public void disseminate(Context context, DSpaceObject dso, OutputStream out) throws SQLException { + if (dso.getType() == Constants.ITEM) { + Item item = (Item) dso; + PrintStream printStream = new PrintStream(out); + for (String actualMetadata : metadata) { + String[] splitted = actualMetadata.split("\\."); + String qualifier = null; + if (splitted.length == 1) { + qualifier = splitted[2]; + } + var metadataValue = itemService.getMetadataFirstValue(item, splitted[0], splitted[1], qualifier, ANY); + printStream.print(metadataValue + " "); + } + String itemURL = HandleServiceFactory.getInstance() + .getHandleService() + .resolveToURL(context, item.getHandle()); + printStream.print(itemURL); + printStream.print("\n"); + printStream.close(); + } + } + + @Override + public String getMIMEType() { + return "text/plain"; + } + + public List getMetadata() { + return metadata; + } + + public void setMetadata(List metadata) { + this.metadata = metadata; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java index d03d2dd8876d..7b25f69ce3f4 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XHTMLHeadDisseminationCrosswalk.java @@ -34,9 +34,9 @@ import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.Verifier; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Verifier; /** * Crosswalk for creating appropriate <meta> elements to appear in the @@ -90,17 +90,17 @@ public class XHTMLHeadDisseminationCrosswalk * Maps DSpace metadata field to name to use in XHTML head element, e.g. * dc.creator or dc.description.abstract */ - private Map names; + private final Map names; /** * Maps DSpace metadata field to scheme for that field, if any */ - private Map schemes; + private final Map schemes; /** * Schemas to add -- maps schema.NAME to schema URL */ - private Map schemaURLs; + private final Map schemaURLs; public XHTMLHeadDisseminationCrosswalk() throws IOException { names = new HashMap<>(); @@ -109,17 +109,9 @@ public XHTMLHeadDisseminationCrosswalk() throws IOException { // Read in configuration Properties crosswalkProps = new Properties(); - FileInputStream fis = new FileInputStream(config); - try { + + try (FileInputStream fis = new FileInputStream(config);) { crosswalkProps.load(fis); - } finally { - if (fis != null) { - try { - fis.close(); - } catch (IOException ioe) { - // ignore - } - } } Enumeration e = crosswalkProps.keys(); @@ -178,6 +170,7 @@ public Element disseminateElement(Context context, DSpaceObject dso) * @throws IOException if IO error * @throws SQLException if database error * @throws AuthorizeException if authorization error + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) throws CrosswalkException, diff --git 
a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java index 1c85fd82c51e..d4ccebf82e2c 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTCrosswalk.java @@ -21,7 +21,7 @@ import org.dspace.core.SelfNamedPlugin; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Namespace; +import org.jdom2.Namespace; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -130,12 +130,6 @@ protected static String[] makeAliases(String direction) { return aliasList.toArray(new String[aliasList.size()]); } - /** - * We need to force this, because some dependency elsewhere interferes. - */ - private static final String TRANSFORMER_FACTORY_CLASS - = "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl"; - private Transformer transformer = null; private File transformFile = null; private long transformLastModified = 0; @@ -181,8 +175,7 @@ protected Transformer getTransformer(String direction) { Source transformSource = new StreamSource(new FileInputStream(transformFile)); TransformerFactory transformerFactory - = TransformerFactory.newInstance( - TRANSFORMER_FACTORY_CLASS, null); + = TransformerFactory.newInstance(); transformer = transformerFactory.newTransformer(transformSource); transformLastModified = transformFile.lastModified(); } catch (TransformerConfigurationException | FileNotFoundException e) { diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java index 6c30c1b1a4db..26371b46aab0 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java +++ 
b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTDisseminationCrosswalk.java @@ -18,6 +18,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; @@ -41,14 +42,15 @@ import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.Verifier; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.jdom.transform.JDOMResult; -import org.jdom.transform.JDOMSource; +import org.jdom2.Content; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Verifier; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.jdom2.transform.JDOMResult; +import org.jdom2.transform.JDOMSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -244,6 +246,7 @@ public Element disseminateElement(Context context, DSpaceObject dso, * @throws SQLException if database error * @throws AuthorizeException if authorization error * @see DisseminationCrosswalk + * @return List of Elements */ @Override public List disseminateList(Context context, DSpaceObject dso) @@ -268,7 +271,12 @@ public List disseminateList(Context context, DSpaceObject dso) try { JDOMResult result = new JDOMResult(); xform.transform(new JDOMSource(createDIM(dso).getChildren()), result); - return result.getResult(); + List contentList = result.getResult(); + // Transform List into List + List elementList = contentList.stream() + .filter(obj -> obj instanceof Element) + .map(Element.class::cast).collect(Collectors.toList()); + return elementList; } catch (TransformerException e) { LOG.error("Got error: " + e.toString()); throw new CrosswalkInternalException("XSL translation 
failed: " + e.toString(), e); diff --git a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java index 37a822374d92..63ef5f7336c7 100644 --- a/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java +++ b/dspace-api/src/main/java/org/dspace/content/crosswalk/XSLTIngestionCrosswalk.java @@ -12,6 +12,7 @@ import java.sql.SQLException; import java.util.Iterator; import java.util.List; +import java.util.stream.Collectors; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; @@ -34,13 +35,14 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.factory.CoreServiceFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.jdom.transform.JDOMResult; -import org.jdom.transform.JDOMSource; +import org.jdom2.Content; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.jdom2.transform.JDOMResult; +import org.jdom2.transform.JDOMSource; /** * Configurable XSLT-driven ingestion Crosswalk @@ -141,7 +143,12 @@ public void ingest(Context context, DSpaceObject dso, List metadata, try { JDOMResult result = new JDOMResult(); xform.transform(new JDOMSource(metadata), result); - ingestDIM(context, dso, result.getResult(), createMissingMetadataFields); + List contentList = result.getResult(); + // Transform List into List + List elementList = contentList.stream() + .filter(obj -> obj instanceof Element) + .map(Element.class::cast).collect(Collectors.toList()); + ingestDIM(context, dso, elementList, createMissingMetadataFields); } catch (TransformerException e) { log.error("Got error: " + e.toString()); throw new 
CrosswalkInternalException("XSL Transformation failed: " + e.toString(), e); diff --git a/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java index c1ef92313127..0d7afaa3cd73 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/BitstreamDAO.java @@ -29,7 +29,7 @@ public interface BitstreamDAO extends DSpaceObjectLegacySupportDAO { public Iterator findAll(Context context, int limit, int offset) throws SQLException; - public List findDeletedBitstreams(Context context) throws SQLException; + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException; public List findDuplicateInternalIdentifier(Context context, Bitstream bitstream) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/content/dao/EntityTypeDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/EntityTypeDAO.java index 4e8a5934ddb1..8dbdc0ef7030 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/EntityTypeDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/EntityTypeDAO.java @@ -8,6 +8,7 @@ package org.dspace.content.dao; import java.sql.SQLException; +import java.util.List; import org.dspace.content.EntityType; import org.dspace.core.Context; @@ -32,4 +33,25 @@ public interface EntityTypeDAO extends GenericDAO { */ public EntityType findByEntityType(Context context, String entityType) throws SQLException; + /** + * + * @param context DSpace context object + * @param names List of Entity type names that you want to retrieve + * @param limit paging limit + * @param offset the position of the first result to return + * @return + * @throws SQLException if database error + */ + public List getEntityTypesByNames(Context context, List names, Integer limit, Integer offset) + throws SQLException; + + /** + * + * @param context DSpace context object + * @param names List of 
Entity type names that you want to retrieve + * @return + * @throws SQLException If database error + */ + public int countEntityTypesByNames(Context context, List names) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java index 4c391d973b45..49d3527a358a 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ItemDAO.java @@ -11,7 +11,6 @@ import java.util.Date; import java.util.Iterator; import java.util.List; -import java.util.UUID; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -32,8 +31,22 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO { public Iterator findAll(Context context, boolean archived, int limit, int offset) throws SQLException; + @Deprecated public Iterator findAll(Context context, boolean archived, boolean withdrawn) throws SQLException; + /** + * Find all items that are: + * - NOT in the workspace + * - NOT in the workflow + * - NOT a template item for e.g. a collection + * + * This implies that the result also contains older versions of items and withdrawn items. + * @param context the DSpace context. + * @return iterator over all regular items. + * @throws SQLException if database error. + */ + public Iterator findAllRegularItems(Context context) throws SQLException; + /** * Find all Items modified since a Date. 
* @@ -66,10 +79,6 @@ public Iterator findBySubmitter(Context context, EPerson eperson, Metadata public Iterator findByMetadataField(Context context, MetadataField metadataField, String value, boolean inArchive) throws SQLException; - public Iterator findByMetadataQuery(Context context, List> listFieldList, - List query_op, List query_val, List collectionUuids, - String regexClause, int offset, int limit) throws SQLException; - public Iterator findByAuthorityValue(Context context, MetadataField metadataField, String authority, boolean inArchive) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java index 4ef26cffcb40..95ec40c7a542 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java @@ -8,10 +8,13 @@ package org.dspace.content.dao; import java.sql.SQLException; +import java.util.Date; import java.util.List; +import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; @@ -81,4 +84,40 @@ List search(Context context, ProcessQueryParameterContainer processQuer int countTotalWithParameters(Context context, ProcessQueryParameterContainer processQueryParameterContainer) throws SQLException; + + /** + * Find all the processes with one of the given status and with a creation time + * older than the specified date. 
+ * + * @param context The relevant DSpace context + * @param statuses the statuses of the processes to search for + * @param date the creation date to search for + * @return The list of all Processes which match requirements + * @throws SQLException If something goes wrong + */ + List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) + throws SQLException; + + /** + * Returns a list of all Process objects in the database by the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which is related to the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java index e28cd0b6ac7e..a152b5b90220 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/RelationshipDAO.java @@ -9,10 +9,12 @@ import java.sql.SQLException; import java.util.List; +import java.util.UUID; import org.dspace.content.Item; import org.dspace.content.Relationship; import org.dspace.content.RelationshipType; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.core.Context; import org.dspace.core.GenericDAO; @@ -27,53 +29,38 @@ public interface RelationshipDAO extends GenericDAO { 
/** * This method returns a list of Relationship objects that have the given Item object * as a leftItem or a rightItem - * @param context The relevant DSpace context - * @param item The item that should be either a leftItem or a rightItem of all - * the Relationship objects in the returned list - * @param excludeTilted If true, excludes tilted relationships - * @return The list of Relationship objects that contain either a left or a - * right item that is equal to the given item - * @throws SQLException If something goes wrong + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param excludeTilted If true, excludes tilted relationships + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that contain either a left or a + * right item that is equal to the given item + * @throws SQLException If something goes wrong */ - List findByItem(Context context, Item item, boolean excludeTilted) throws SQLException; + List findByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException; /** * This method returns a list of Relationship objects that have the given Item object * as a leftItem or a rightItem - * @param context The relevant DSpace context - * @param item The item that should be either a leftItem or a rightItem of all - * the Relationship objects in the returned list - * @param limit paging limit - * @param offset paging offset - * @param excludeTilted If true, excludes tilted relationships - * @return The list of Relationship objects that contain either a left or a - * right item that is equal to the given item - * @throws SQLException If something goes wrong - */ - List findByItem(Context context, Item item, Integer limit, Integer offset, 
boolean excludeTilted) - throws SQLException; - - /** - * This method returns the next leftplace integer to use for a relationship with this item as the leftItem - * - * @param context The relevant DSpace context - * @param item The item to be matched on leftItem - * @return The next integer to be used for the leftplace of a relationship with the given item - * as a left item - * @throws SQLException If something goes wrong - */ - int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException; - - /** - * This method returns the next rightplace integer to use for a relationship with this item as the rightItem - * - * @param context The relevant DSpace context - * @param item The item to be matched on rightItem - * @return The next integer to be used for the rightplace of a relationship with the given item - * as a right item - * @throws SQLException If something goes wrong + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param limit paging limit + * @param offset paging offset + * @param excludeTilted If true, excludes tilted relationships + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that contain either a left or a + * right item that is equal to the given item + * @throws SQLException If something goes wrong */ - int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException; + List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException; /** * This method returns a list of Relationship objects for the given RelationshipType object. 
@@ -107,34 +94,69 @@ List findByRelationshipType(Context context, RelationshipType rela * It will construct a list of all Relationship objects that have the given RelationshipType object * as the relationshipType property * @param context The relevant DSpace context + * @param item item to filter by * @param relationshipType The RelationshipType object to be checked on * @param limit paging limit * @param offset paging offset - * @param item item to filter by + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship * @return A list of Relationship objects that have the given RelationshipType object as the * relationshipType property * @throws SQLException If something goes wrong */ - List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - Integer limit, Integer offset) throws SQLException; + List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException; /** * This method returns a list of Relationship objects for the given RelationshipType object. 
* It will construct a list of all Relationship objects that have the given RelationshipType object * as the relationshipType property * @param context The relevant DSpace context + * @param item item to filter by * @param relationshipType The RelationshipType object to be checked on + * @param isLeft Is item left or right * @param limit paging limit * @param offset paging offset - * @param item item to filter by - * @param isLeft Is item left or right + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship * @return A list of Relationship objects that have the given RelationshipType object as the * relationshipType property * @throws SQLException If something goes wrong */ - List findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - boolean isLeft, Integer limit, Integer offset) - throws SQLException; + List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException; + + /** + * This method returns the UUIDs of all items that have a relationship with the given item, from the perspective + * of the other item. In other words, given a relationship with the given item, the given item should have + * "latest status" in order for the other item uuid to be returned. + * + * This method differs from the "excludeNonLatest" property in other methods, + * because in this method the current item should have "latest status" to return the other item, + * whereas with "excludeNonLatest" the other item should have "latest status" to be returned. + * + * This method is used to index items in solr; when searching for related items of one of the returned uuids, + * the given item should appear as a search result. 
+ * + * NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch + * the items on both sides, which is unnecessary. + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type. + * @param context the DSpace context. + * @param latestItem the target item; only relationships where this item has "latest status" should be considered. + * @param relationshipType the relationship type for which relationships should be selected. + * @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type. + * This is redundant in most cases, but necessary because relationship types my have + * the same entity type on both sides. + * @return a list containing pairs of relationship ids and item uuids. + * @throws SQLException if something goes wrong. + */ + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException; /** * This method returns a list of Relationship objects for the given typeName @@ -182,28 +204,34 @@ List findByTypeName(Context context, String typeName, Integer limi /** * This method returns a count of Relationship objects that have the given Item object * as a leftItem or a rightItem - * @param context The relevant DSpace context - * @param item The item that should be either a leftItem or a rightItem of all - * the Relationship objects in the returned list + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param excludeTilted if true, excludes tilted relationships + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant * @return The list of Relationship objects that contain either a left or a * right item that is equal to the given item * 
@throws SQLException If something goes wrong */ - int countByItem(Context context, Item item) throws SQLException; + int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException; /** * Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean * indicating whether the item should be the leftItem or the rightItem * - * @param context context - * @param relationshipType relationship type to filter by - * @param item item to filter by - * @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not + * @param context context + * @param relationshipType relationship type to filter by + * @param item item to filter by + * @param isLeft indicating whether the counted Relationships should have the given Item on the left side + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant * @return total count * @throws SQLException if database error */ - int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) - throws SQLException; + int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException; /** * Count total number of relationships (rows in relationship table) given a typeName @@ -215,4 +243,39 @@ int countByItemAndRelationshipType(Context context, Item item, RelationshipType */ int countByTypeName(Context context, String typeName) throws SQLException; + + /** + * This method is used to retrieve relationships that match focusItem + * on the one hand and matches list of related items elsewhere. 
+ * + * @param context DSpace context object + * @param focusUUID UUID of Item that will match left side if the param isLeft is true otherwise right side + * @param relationshipType Relationship type to filter by + * @param items List of UUIDs that will be used to filter the other side with respect to the focusUUID + * @param isLeft Indicating whether the counted Relationships should have + * the given Item on the left side or not + * @param limit paging limit + * @param offset paging offset + * @return list of matching Relationship objects + * @throws SQLException If database error + */ + List findByItemAndRelationshipTypeAndList(Context context, UUID focusUUID, + RelationshipType relationshipType, List items, boolean isLeft, + int offset, int limit) throws SQLException; + + /** + * Count total number of relationships that match focusItem + * on the one hand and match a list of related items on the other. + * + * @param context DSpace context object + * @param focusUUID UUID of Item that will match left side if the param isLeft is true otherwise right side + * @param relationshipType Relationship type to filter by + * @param items List of UUIDs that will be used to filter the other side with respect to the focusUUID + * @param isLeft Indicating whether the counted Relationships should have + * the given Item on the left side or not + * @return total count of matching relationships + * @throws SQLException If database error + */ + int countByItemAndRelationshipTypeAndList(Context context, UUID focusUUID, RelationshipType relationshipType, + List items, boolean isLeft) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/RelationshipTypeDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/RelationshipTypeDAO.java index e451e48cf2b3..f374f2cabcc6 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/RelationshipTypeDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/RelationshipTypeDAO.java @@ -120,4 +120,16 @@ List findByEntityType(Context context, EntityType entityType, List findByEntityType(Context context, EntityType
entityType, Boolean isLeft, Integer limit, Integer offset) throws SQLException; + + /** + * Count all RelationshipType objects for which the given EntityType + * is equal to either the leftType or the rightType + * + * @param context DSpace context object + * @param entityType The EntityType object used to check the leftType and rightType properties + * @return Total RelationshipType objects + * @throws SQLException If database error + */ + public int countByEntityType(Context context, EntityType entityType) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/WorkspaceItemDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/WorkspaceItemDAO.java index 4ae8dc620b21..900858b72869 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/WorkspaceItemDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/WorkspaceItemDAO.java @@ -41,10 +41,6 @@ public List findByEPerson(Context context, EPerson ep, Integer li public List findAll(Context context, Integer limit, Integer offset) throws SQLException; - public List findWithSupervisedGroup(Context context) throws SQLException; - - public List findBySupervisedGroupMember(Context context, EPerson ePerson) throws SQLException; - int countRows(Context context) throws SQLException; List> getStageReachedCounts(Context context) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java index 02e3509c311a..0e051625aaee 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamDAOImpl.java @@ -41,13 +41,14 @@ protected BitstreamDAOImpl() { } @Override - public List findDeletedBitstreams(Context context) throws SQLException { + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException { CriteriaBuilder criteriaBuilder = 
getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Bitstream.class); Root bitstreamRoot = criteriaQuery.from(Bitstream.class); criteriaQuery.select(bitstreamRoot); + criteriaQuery.orderBy(criteriaBuilder.desc(bitstreamRoot.get(Bitstream_.ID))); criteriaQuery.where(criteriaBuilder.equal(bitstreamRoot.get(Bitstream_.deleted), true)); - return list(context, criteriaQuery, false, Bitstream.class, -1, -1); + return list(context, criteriaQuery, false, Bitstream.class, limit, offset); } @@ -67,9 +68,9 @@ public List findDuplicateInternalIdentifier(Context context, Bitstrea @Override public List findBitstreamsWithNoRecentChecksum(Context context) throws SQLException { - Query query = createQuery(context, - "select b from Bitstream b where b not in (select c.bitstream from " + - "MostRecentChecksum c)"); + Query query = createQuery(context, "SELECT b FROM MostRecentChecksum c RIGHT JOIN Bitstream b " + + "ON c.bitstream = b WHERE c IS NULL" ); + return query.getResultList(); } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamFormatDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamFormatDAOImpl.java index 0824c5c343e5..4d9283bbec4d 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamFormatDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/BitstreamFormatDAOImpl.java @@ -23,7 +23,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the BitstreamFormat object. - * This class is responsible for all database calls for the BitstreamFormat object and is autowired by spring + * This class is responsible for all database calls for the BitstreamFormat object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -86,7 +86,7 @@ public BitstreamFormat findByShortDescription(Context context, Root bitstreamFormatRoot = criteriaQuery.from(BitstreamFormat.class); criteriaQuery.select(bitstreamFormatRoot); criteriaQuery.where(criteriaBuilder.equal(bitstreamFormatRoot.get(BitstreamFormat_.shortDescription), desc)); - return uniqueResult(context, criteriaQuery, false, BitstreamFormat.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, BitstreamFormat.class); } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java index b08a57cbe187..c0ef6ea42fce 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/CollectionDAOImpl.java @@ -9,7 +9,7 @@ import java.sql.SQLException; import java.util.AbstractMap; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.persistence.Query; @@ -34,7 +34,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the Collection object. - * This class is responsible for all database calls for the Collection object and is autowired by spring + * This class is responsible for all database calls for the Collection object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -95,7 +95,7 @@ public Collection findByTemplateItem(Context context, Item item) throws SQLExcep Root collectionRoot = criteriaQuery.from(Collection.class); criteriaQuery.select(collectionRoot); criteriaQuery.where(criteriaBuilder.equal(collectionRoot.get(Collection_.template), item)); - return uniqueResult(context, criteriaQuery, false, Collection.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, Collection.class); } @Override @@ -119,7 +119,7 @@ public List findAuthorized(Context context, EPerson ePerson, List collectionRoot = criteriaQuery.from(Collection.class); Join join = collectionRoot.join("resourcePolicies"); - List orPredicates = new LinkedList(); + List orPredicates = new ArrayList<>(actions.size()); for (Integer action : actions) { orPredicates.add(criteriaBuilder.equal(join.get(ResourcePolicy_.actionId), action)); } @@ -176,7 +176,7 @@ public List> getCollectionsWithBitstreamSizesTotal(C Query query = createQuery(context, q); List list = query.getResultList(); - List> returnList = new LinkedList<>(); + List> returnList = new ArrayList<>(list.size()); for (Object[] o : list) { returnList.add(new AbstractMap.SimpleEntry<>((Collection) o[0], (Long) o[1])); } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/EntityTypeDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/EntityTypeDAOImpl.java index b9c48c67112c..489f4cd0667d 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/EntityTypeDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/EntityTypeDAOImpl.java @@ -8,8 +8,11 @@ package org.dspace.content.dao.impl; import java.sql.SQLException; +import java.util.LinkedList; +import java.util.List; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Order; import javax.persistence.criteria.Root; import org.dspace.content.EntityType; @@ -18,6 +21,15 @@ 
import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +/** + * Hibernate implementation of the Database Access Object interface class for + * the EntityType object. + * This class is responsible for all database calls for the EntityType object + * and is autowired by Spring. + * This class should never be accessed directly. + * + * @author kevinvandevelde at atmire.com + */ public class EntityTypeDAOImpl extends AbstractHibernateDAO implements EntityTypeDAO { @Override @@ -28,6 +40,30 @@ public EntityType findByEntityType(Context context, String entityType) throws SQ criteriaQuery.select(entityTypeRoot); criteriaQuery.where(criteriaBuilder.equal(criteriaBuilder.upper(entityTypeRoot.get(EntityType_.label)), entityType.toUpperCase())); - return uniqueResult(context, criteriaQuery, true, EntityType.class, -1, -1); + return uniqueResult(context, criteriaQuery, true, EntityType.class); + } + + @Override + public List getEntityTypesByNames(Context context, List names, Integer limit, Integer offset) + throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, EntityType.class); + Root entityTypeRoot = criteriaQuery.from(EntityType.class); + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.desc(entityTypeRoot.get(EntityType_.label))); + criteriaQuery.select(entityTypeRoot).orderBy(orderList); + criteriaQuery.where(entityTypeRoot.get(EntityType_.LABEL).in(names)); + return list(context, criteriaQuery, false, EntityType.class, limit, offset); } + + @Override + public int countEntityTypesByNames(Context context, List names) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, EntityType.class); + Root entityTypeRoot = criteriaQuery.from(EntityType.class); + criteriaQuery.select(entityTypeRoot); + 
criteriaQuery.where(entityTypeRoot.get(EntityType_.LABEL).in(names)); + return count(context, criteriaQuery, criteriaBuilder, entityTypeRoot); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java index 683a6502c527..5c840f68e998 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ItemDAOImpl.java @@ -12,7 +12,6 @@ import java.util.Date; import java.util.Iterator; import java.util.List; -import java.util.UUID; import javax.persistence.Query; import javax.persistence.TemporalType; import javax.persistence.criteria.CriteriaBuilder; @@ -24,19 +23,10 @@ import org.dspace.content.Item; import org.dspace.content.Item_; import org.dspace.content.MetadataField; -import org.dspace.content.MetadataValue; import org.dspace.content.dao.ItemDAO; import org.dspace.core.AbstractHibernateDSODAO; import org.dspace.core.Context; import org.dspace.eperson.EPerson; -import org.hibernate.Criteria; -import org.hibernate.criterion.Criterion; -import org.hibernate.criterion.DetachedCriteria; -import org.hibernate.criterion.Projections; -import org.hibernate.criterion.Property; -import org.hibernate.criterion.Restrictions; -import org.hibernate.criterion.Subqueries; -import org.hibernate.type.StandardBasicTypes; /** * Hibernate implementation of the Database Access Object interface class for the Item object. 
@@ -54,14 +44,14 @@ protected ItemDAOImpl() { @Override public Iterator findAll(Context context, boolean archived) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive"); + Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); return iterate(query); } @Override public Iterator findAll(Context context, boolean archived, int limit, int offset) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive"); + Query query = createQuery(context, "FROM Item WHERE inArchive=:in_archive ORDER BY id"); query.setParameter("in_archive", archived); query.setFirstResult(offset); query.setMaxResults(limit); @@ -71,12 +61,27 @@ public Iterator findAll(Context context, boolean archived, int limit, int @Override public Iterator findAll(Context context, boolean archived, boolean withdrawn) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive or withdrawn = :withdrawn"); + Query query = createQuery(context, + "FROM Item WHERE inArchive=:in_archive or withdrawn=:withdrawn ORDER BY id"); query.setParameter("in_archive", archived); query.setParameter("withdrawn", withdrawn); return iterate(query); } + @Override + public Iterator findAllRegularItems(Context context) throws SQLException { + // NOTE: This query includes archived items, withdrawn items and older versions of items. + // It does not include workspace, workflow or template items. 
+ Query query = createQuery( + context, + "SELECT i FROM Item as i " + + "LEFT JOIN Version as v ON i = v.item " + + "WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " + + "ORDER BY i.id" + ); + return iterate(query); + } + @Override public Iterator findAll(Context context, boolean archived, boolean withdrawn, boolean discoverable, Date lastModified) @@ -89,6 +94,7 @@ public Iterator findAll(Context context, boolean archived, if (lastModified != null) { queryStr.append(" AND last_modified > :last_modified"); } + queryStr.append(" ORDER BY i.id"); Query query = createQuery(context, queryStr.toString()); query.setParameter("in_archive", archived); @@ -102,7 +108,8 @@ public Iterator findAll(Context context, boolean archived, @Override public Iterator findBySubmitter(Context context, EPerson eperson) throws SQLException { - Query query = createQuery(context, "FROM Item WHERE inArchive= :in_archive and submitter= :submitter"); + Query query = createQuery(context, + "FROM Item WHERE inArchive=:in_archive and submitter=:submitter ORDER BY id"); query.setParameter("in_archive", true); query.setParameter("submitter", eperson); return iterate(query); @@ -114,7 +121,7 @@ public Iterator findBySubmitter(Context context, EPerson eperson, boolean if (!retrieveAllItems) { return findBySubmitter(context, eperson); } - Query query = createQuery(context, "FROM Item WHERE submitter= :submitter"); + Query query = createQuery(context, "FROM Item WHERE submitter=:submitter ORDER BY id"); query.setParameter("submitter", eperson); return iterate(query); } @@ -146,7 +153,7 @@ public Iterator findByMetadataField(Context context, MetadataField metadat if (value != null) { hqlQueryString += " AND STR(metadatavalue.value) = :text_value"; } - Query query = createQuery(context, hqlQueryString); + Query query = createQuery(context, hqlQueryString + " ORDER BY item.id"); query.setParameter("in_archive", inArchive); query.setParameter("metadata_field", 
metadataField); @@ -156,125 +163,13 @@ public Iterator findByMetadataField(Context context, MetadataField metadat return iterate(query); } - enum OP { - equals { - public Criterion buildPredicate(String val, String regexClause) { - return Property.forName("mv.value").eq(val); - } - }, - not_equals { - public Criterion buildPredicate(String val, String regexClause) { - return OP.equals.buildPredicate(val, regexClause); - } - }, - like { - public Criterion buildPredicate(String val, String regexClause) { - return Property.forName("mv.value").like(val); - } - }, - not_like { - public Criterion buildPredicate(String val, String regexClause) { - return OP.like.buildPredicate(val, regexClause); - } - }, - contains { - public Criterion buildPredicate(String val, String regexClause) { - return Property.forName("mv.value").like("%" + val + "%"); - } - }, - doesnt_contain { - public Criterion buildPredicate(String val, String regexClause) { - return OP.contains.buildPredicate(val, regexClause); - } - }, - exists { - public Criterion buildPredicate(String val, String regexClause) { - return Property.forName("mv.value").isNotNull(); - } - }, - doesnt_exist { - public Criterion buildPredicate(String val, String regexClause) { - return OP.exists.buildPredicate(val, regexClause); - } - }, - matches { - public Criterion buildPredicate(String val, String regexClause) { - return Restrictions.sqlRestriction(regexClause, val, StandardBasicTypes.STRING); - } - }, - doesnt_match { - public Criterion buildPredicate(String val, String regexClause) { - return OP.matches.buildPredicate(val, regexClause); - } - - }; - public abstract Criterion buildPredicate(String val, String regexClause); - } - - @Override - @Deprecated - public Iterator findByMetadataQuery(Context context, List> listFieldList, - List query_op, List query_val, List collectionUuids, - String regexClause, int offset, int limit) throws SQLException { - - Criteria criteria = 
getHibernateSession(context).createCriteria(Item.class, "item"); - criteria.setFirstResult(offset); - criteria.setMaxResults(limit); - - if (!collectionUuids.isEmpty()) { - DetachedCriteria dcollCriteria = DetachedCriteria.forClass(Collection.class, "coll"); - dcollCriteria.setProjection(Projections.property("coll.id")); - dcollCriteria.add(Restrictions.eqProperty("coll.id", "item.owningCollection")); - dcollCriteria.add(Restrictions.in("coll.id", collectionUuids)); - criteria.add(Subqueries.exists(dcollCriteria)); - } - - int index = Math.min(listFieldList.size(), Math.min(query_op.size(), query_val.size())); - StringBuilder sb = new StringBuilder(); - - for (int i = 0; i < index; i++) { - OP op = OP.valueOf(query_op.get(i)); - if (op == null) { - log.warn("Skipping Invalid Operator: " + query_op.get(i)); - continue; - } - - if (op == OP.matches || op == OP.doesnt_match) { - if (regexClause.isEmpty()) { - log.warn("Skipping Unsupported Regex Operator: " + query_op.get(i)); - continue; - } - } - - DetachedCriteria subcriteria = DetachedCriteria.forClass(MetadataValue.class, "mv"); - subcriteria.add(Property.forName("mv.dSpaceObject").eqProperty("item.id")); - subcriteria.setProjection(Projections.property("mv.dSpaceObject")); - - if (!listFieldList.get(i).isEmpty()) { - subcriteria.add(Restrictions.in("metadataField", listFieldList.get(i))); - } - - subcriteria.add(op.buildPredicate(query_val.get(i), regexClause)); - - if (op == OP.exists || op == OP.equals || op == OP.like || op == OP.contains || op == OP.matches) { - criteria.add(Subqueries.exists(subcriteria)); - } else { - criteria.add(Subqueries.notExists(subcriteria)); - } - } - log.debug(String.format("Running custom query with %d filters", index)); - - return ((List) criteria.list()).iterator(); - - } - @Override public Iterator findByAuthorityValue(Context context, MetadataField metadataField, String authority, boolean inArchive) throws SQLException { Query query = createQuery(context, "SELECT item FROM 
Item as item join item.metadata metadatavalue " + "WHERE item.inArchive=:in_archive AND metadatavalue.metadataField = :metadata_field AND " + - "metadatavalue.authority = :authority"); + "metadatavalue.authority = :authority ORDER BY item.id"); query.setParameter("in_archive", inArchive); query.setParameter("metadata_field", metadataField); query.setParameter("authority", authority); @@ -286,7 +181,7 @@ public Iterator findArchivedByCollection(Context context, Collection colle Integer offset) throws SQLException { Query query = createQuery(context, "select i from Item i join i.collections c " + - "WHERE :collection IN c AND i.inArchive=:in_archive"); + "WHERE :collection IN c AND i.inArchive=:in_archive ORDER BY i.id"); query.setParameter("collection", collection); query.setParameter("in_archive", true); if (offset != null) { @@ -309,6 +204,8 @@ public Iterator findArchivedByCollectionExcludingOwning(Context context, C criteriaBuilder.notEqual(itemRoot.get(Item_.owningCollection), collection), criteriaBuilder.isMember(collection, itemRoot.get(Item_.collections)), criteriaBuilder.isTrue(itemRoot.get(Item_.inArchive)))); + criteriaQuery.orderBy(criteriaBuilder.asc(itemRoot.get(Item_.id))); + criteriaQuery.groupBy(itemRoot.get(Item_.id)); return list(context, criteriaQuery, false, Item.class, limit, offset).iterator(); } @@ -327,7 +224,8 @@ public int countArchivedByCollectionExcludingOwning(Context context, Collection @Override public Iterator findAllByCollection(Context context, Collection collection) throws SQLException { - Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c"); + Query query = createQuery(context, + "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); return iterate(query); @@ -336,7 +234,8 @@ public Iterator findAllByCollection(Context context, Collection collection @Override public Iterator findAllByCollection(Context 
context, Collection collection, Integer limit, Integer offset) throws SQLException { - Query query = createQuery(context, "select i from Item i join i.collections c WHERE :collection IN c"); + Query query = createQuery(context, + "select i from Item i join i.collections c WHERE :collection IN c ORDER BY i.id"); query.setParameter("collection", collection); if (offset != null) { @@ -381,7 +280,8 @@ public int countItems(Context context, List collections, boolean inc @Override public Iterator findByLastModifiedSince(Context context, Date since) throws SQLException { - Query query = createQuery(context, "SELECT i FROM item i WHERE last_modified > :last_modified"); + Query query = createQuery(context, + "SELECT i FROM Item i WHERE last_modified > :last_modified ORDER BY id"); query.setParameter("last_modified", since, TemporalType.TIMESTAMP); return iterate(query); } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataSchemaDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataSchemaDAOImpl.java index 80198a1e890e..71eb487b8395 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataSchemaDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/MetadataSchemaDAOImpl.java @@ -8,7 +8,7 @@ package org.dspace.content.dao.impl; import java.sql.SQLException; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; @@ -63,7 +63,7 @@ public List findAll(Context context, Class clazz) throws SQLExce Root metadataSchemaRoot = criteriaQuery.from(MetadataSchema.class); criteriaQuery.select(metadataSchemaRoot); - List orderList = new LinkedList<>(); + List orderList = new ArrayList<>(); orderList.add(criteriaBuilder.asc(metadataSchemaRoot.get(MetadataSchema_.id))); criteriaQuery.orderBy(orderList); diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java 
b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 5c8083a86b6f..d719b5006c14 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -7,7 +7,10 @@ */ package org.dspace.content.dao.impl; +import static org.dspace.scripts.Process_.CREATION_TIME; + import java.sql.SQLException; +import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -17,9 +20,11 @@ import javax.persistence.criteria.Root; import org.apache.commons.lang3.StringUtils; +import org.dspace.content.ProcessStatus; import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; import org.dspace.scripts.Process_; @@ -147,6 +152,50 @@ public int countTotalWithParameters(Context context, ProcessQueryParameterContai } + @Override + public List findByStatusAndCreationTimeOlderThan(Context context, List statuses, + Date date) throws SQLException { + + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + + Predicate creationTimeLessThanGivenDate = criteriaBuilder.lessThan(processRoot.get(CREATION_TIME), date); + Predicate statusIn = processRoot.get(Process_.PROCESS_STATUS).in(statuses); + criteriaQuery.where(criteriaBuilder.and(creationTimeLessThanGivenDate, statusIn)); + + return list(context, criteriaQuery, false, Process.class, -1, -1); + } + + @Override + public List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = 
getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID))); + criteriaQuery.orderBy(orderList); + + return list(context, criteriaQuery, false, Process.class, limit, offset); + } + + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + return count(context, criteriaQuery, criteriaBuilder, processRoot); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java index db1aef96a200..e2f84bc1cb64 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipDAOImpl.java @@ -10,16 +10,23 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; +import javax.persistence.Query; +import javax.persistence.Tuple; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import org.dspace.content.Item; +import org.dspace.content.Item_; import org.dspace.content.Relationship; import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType_; import org.dspace.content.Relationship_; 
import org.dspace.content.dao.RelationshipDAO; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.AbstractHibernateDAO; @@ -28,93 +35,150 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO implements RelationshipDAO { @Override - public List findByItem(Context context, Item item, boolean excludeTilted) throws SQLException { - return findByItem(context, item, -1, -1, excludeTilted); + public List findByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { + return findByItem(context, item, -1, -1, excludeTilted, excludeNonLatest); } @Override - public List findByItem(Context context, Item item, Integer limit, Integer offset, - boolean excludeTilted) throws SQLException { - + public List findByItem( + Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); - if (excludeTilted) { - // If this item is the left item, - // return relationships for types which are not tilted right (tilted is either left nor null) - // If this item is the right item, - // return relationships for types which are not tilted left (tilted is either right nor null) - criteriaQuery - .where(criteriaBuilder.or( - criteriaBuilder.and( - criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.or( - criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType) - .get(RelationshipType_.tilted)), - 
criteriaBuilder.notEqual(relationshipRoot - .get(Relationship_.relationshipType) - .get(RelationshipType_.tilted), RelationshipType.Tilted.RIGHT))), - criteriaBuilder.and( - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item), - criteriaBuilder.or( - criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType) - .get(RelationshipType_.tilted)), - criteriaBuilder.notEqual(relationshipRoot - .get(Relationship_.relationshipType) - .get(RelationshipType_.tilted), RelationshipType.Tilted.LEFT))))); - } else { - criteriaQuery - .where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); - } + + criteriaQuery.where( + criteriaBuilder.or( + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest) + ) + ); + return list(context, criteriaQuery, false, Relationship.class, limit, offset); } - @Override - public int countByItem(Context context, Item item) - throws SQLException { + /** + * Get the predicate for a criteria query that selects relationships by their left item. + * @param criteriaBuilder the criteria builder. + * @param relationshipRoot the relationship root. + * @param item the item that is being searched for. + * @param excludeTilted if true, exclude tilted relationships. + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant. + * @return a predicate that satisfies the given restrictions. 
+ */ + protected Predicate getLeftItemPredicate( + CriteriaBuilder criteriaBuilder, Root relationshipRoot, Item item, + boolean excludeTilted, boolean excludeNonLatest + ) { + List predicates = new ArrayList<>(); - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); - Root relationshipRoot = criteriaQuery.from(Relationship.class); - criteriaQuery.select(relationshipRoot); - criteriaQuery - .where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); - return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); + // match relationships based on the left item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item) + ); + + if (excludeTilted) { + // if this item is the left item, + // return relationships for types which are NOT tilted right (tilted is either left nor null) + predicates.add( + criteriaBuilder.or( + criteriaBuilder.isNull( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted) + ), + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted), + RelationshipType.Tilted.RIGHT + ) + ) + ); + } + + if (excludeNonLatest) { + // if this item is the left item, + // return relationships for which the right item is the "latest" version that is relevant. 
+ predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.LEFT_ONLY + ) + ); + } + + return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); } - @Override - public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException { - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); - Root relationshipRoot = criteriaQuery.from(Relationship.class); - criteriaQuery.select(relationshipRoot); - criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); - List list = list(context, criteriaQuery, false, Relationship.class, -1, -1); - list.sort((o1, o2) -> o2.getLeftPlace() - o1.getLeftPlace()); - if (!list.isEmpty()) { - return list.get(0).getLeftPlace() + 1; - } else { - return 0; + /** + * Get the predicate for a criteria query that selects relationships by their right item. + * @param criteriaBuilder the criteria builder. + * @param relationshipRoot the relationship root. + * @param item the item that is being searched for. + * @param excludeTilted if true, exclude tilted relationships. + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant. + * @return a predicate that satisfies the given restrictions. 
+ */ + protected Predicate getRightItemPredicate( + CriteriaBuilder criteriaBuilder, Root relationshipRoot, Item item, + boolean excludeTilted, boolean excludeNonLatest + ) { + List predicates = new ArrayList<>(); + + // match relationships based on the right item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item) + ); + + if (excludeTilted) { + // if this item is the right item, + // return relationships for types which are NOT tilted left (tilted is either right nor null) + predicates.add( + criteriaBuilder.or( + criteriaBuilder.isNull( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted) + ), + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted), + RelationshipType.Tilted.LEFT + ) + ) + ); + } + + if (excludeNonLatest) { + // if this item is the right item, + // return relationships for which the left item is the "latest" version that is relevant. + predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.RIGHT_ONLY + ) + ); } + + return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); } @Override - public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException { + public int countByItem( + Context context, Item item, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); - criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); - List list = list(context, criteriaQuery, false, Relationship.class, -1, -1); - list.sort((o1, o2) -> o2.getRightPlace() - o1.getRightPlace()); - if (!list.isEmpty()) { - return 
list.get(0).getRightPlace() + 1; - } else { - return 0; - } + + criteriaQuery.where( + criteriaBuilder.or( + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest) + ) + ); + + return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); } @Override @@ -138,49 +202,132 @@ public List findByRelationshipType(Context context, RelationshipTy } @Override - public List findByItemAndRelationshipType(Context context, Item item, - RelationshipType relationshipType, Integer limit, - Integer offset) - throws SQLException { - + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), criteriaBuilder.or - (criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); + + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + criteriaBuilder.or( + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ) + ); + return list(context, criteriaQuery, true, Relationship.class, limit, offset); } @Override - public List findByItemAndRelationshipType(Context context, Item item, - RelationshipType relationshipType, boolean isLeft, - Integer limit, Integer offset) 
- throws SQLException { - + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset, + boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); + if (isLeft) { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace))); } else { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace))); } + return list(context, criteriaQuery, true, Relationship.class, limit, offset); } + @Override + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException { + final String relationshipIdAlias = "relationshipId"; + final String itemUuidAlias = "itemUuid"; + + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = 
criteriaBuilder.createTupleQuery(); + Root relationshipRoot = criteriaQuery.from(Relationship.class); + + ArrayList predicates = new ArrayList<>(); + + // all relationships should have the specified relationship type + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType) + ); + + if (isLeft) { + // match relationships based on the left item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), latestItem) + ); + + // the left item has to have "latest status" => accept BOTH and LEFT_ONLY + predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.RIGHT_ONLY + ) + ); + + // return the UUIDs of the right item + criteriaQuery.multiselect( + relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias), + relationshipRoot.get(Relationship_.rightItem).get(Item_.id).alias(itemUuidAlias) + ); + } else { + // match relationships based on the right item + predicates.add( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), latestItem) + ); + + // the right item has to have "latest status" => accept BOTH and RIGHT_ONLY + predicates.add( + criteriaBuilder.notEqual( + relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS), + Relationship.LatestVersionStatus.LEFT_ONLY + ) + ); + + // return the UUIDs of the left item + criteriaQuery.multiselect( + relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias), + relationshipRoot.get(Relationship_.leftItem).get(Item_.id).alias(itemUuidAlias) + ); + } + + // all predicates are chained with the AND operator + criteriaQuery.where(predicates.toArray(new Predicate[]{})); + + // deduplicate result + criteriaQuery.distinct(true); + + // execute query + Query query = this.getHibernateSession(context).createQuery(criteriaQuery); + query.setHint("org.hibernate.cacheable", true); + List resultList = query.getResultList(); + + // convert 
types + return resultList.stream() + .map(Tuple.class::cast) + .map(t -> new ItemUuidAndRelationshipId( + (UUID) t.get(itemUuidAlias), + (Integer) t.get(relationshipIdAlias) + )) + .collect(Collectors.toList()); + } + @Override public List findByTypeName(Context context, String typeName) throws SQLException { @@ -226,24 +373,26 @@ public int countRows(Context context) throws SQLException { } @Override - public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, - boolean isLeft) throws SQLException { - + public int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); Root relationshipRoot = criteriaQuery.from(Relationship.class); criteriaQuery.select(relationshipRoot); + if (isLeft) { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); } else { - criteriaQuery - .where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), - relationshipType), - criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); + criteriaQuery.where( + criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType), + getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest) + ); } + return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); } @@ -264,4 +413,37 @@ public int countByTypeName(Context context, String typeName) 
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); } + @Override + public List findByItemAndRelationshipTypeAndList(Context context, UUID focusUUID, + RelationshipType relationshipType, List items, boolean isLeft, + int offset, int limit) throws SQLException { + String side = isLeft ? "left_id" : "right_id"; + String otherSide = !isLeft ? "left_id" : "right_id"; + Query query = createQuery(context, "FROM " + Relationship.class.getSimpleName() + + " WHERE type_id = (:typeId) " + + "AND " + side + " = (:focusUUID) " + + "AND " + otherSide + " in (:list) " + + "ORDER BY id"); + query.setParameter("typeId", relationshipType.getID()); + query.setParameter("focusUUID", focusUUID); + query.setParameter("list", items); + return list(query, limit, offset); + } + + @Override + public int countByItemAndRelationshipTypeAndList(Context context, UUID focusUUID, RelationshipType relationshipType, + List items, boolean isLeft) throws SQLException { + String side = isLeft ? "left_id" : "right_id"; + String otherSide = !isLeft ? 
"left_id" : "right_id"; + Query query = createQuery(context, "SELECT count(*) " + + "FROM " + Relationship.class.getSimpleName() + + " WHERE type_id = (:typeId) " + + "AND " + side + " = (:focusUUID) " + + "AND " + otherSide + " in (:list)"); + query.setParameter("typeId", relationshipType.getID()); + query.setParameter("focusUUID", focusUUID); + query.setParameter("list", items); + return count(query); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipTypeDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipTypeDAOImpl.java index 96d4bf68fb53..7fff2a1f57da 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipTypeDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/RelationshipTypeDAOImpl.java @@ -8,6 +8,7 @@ package org.dspace.content.dao.impl; import java.sql.SQLException; +import java.util.LinkedList; import java.util.List; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; @@ -20,6 +21,15 @@ import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +/** + * Hibernate implementation of the Database Access Object interface class for + * the RelationshipType object. + * This class is responsible for all database calls for the RelationshipType + * object and is autowired by Spring. + * This class should never be accessed directly. 
+ * + * @author kevinvandevelde at atmire.com + */ public class RelationshipTypeDAOImpl extends AbstractHibernateDAO implements RelationshipTypeDAO { @Override @@ -36,7 +46,7 @@ public RelationshipType findbyTypesAndTypeName(Context context, EntityType leftT criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightType), rightType), criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.leftwardType), leftwardType), criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightwardType), rightwardType))); - return uniqueResult(context, criteriaQuery, false, RelationshipType.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, RelationshipType.class); } @Override @@ -83,6 +93,9 @@ public List findByEntityType(Context context, EntityType entit .equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType) ) ); + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.asc(relationshipTypeRoot.get(RelationshipType_.ID))); + criteriaQuery.orderBy(orderList); return list(context, criteriaQuery, false, RelationshipType.class, limit, offset); } @@ -111,4 +124,18 @@ public List findByEntityType(Context context, EntityType entit } return list(context, criteriaQuery, false, RelationshipType.class, limit, offset); } + + @Override + public int countByEntityType(Context context, EntityType entityType) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, RelationshipType.class); + Root relationshipTypeRoot = criteriaQuery.from(RelationshipType.class); + criteriaQuery.select(relationshipTypeRoot); + criteriaQuery.where(criteriaBuilder.or( + criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.leftType), entityType), + criteriaBuilder.equal(relationshipTypeRoot.get(RelationshipType_.rightType), entityType) + )); + return count(context, criteriaQuery, criteriaBuilder, relationshipTypeRoot); + } + } diff 
--git a/dspace-api/src/main/java/org/dspace/content/dao/impl/SiteDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/SiteDAOImpl.java index 2ca07c51b91b..9c50a998ee64 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/SiteDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/SiteDAOImpl.java @@ -20,7 +20,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the Site object. - * This class is responsible for all database calls for the Site object and is autowired by spring + * This class is responsible for all database calls for the Site object and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -47,7 +47,7 @@ public Site findSite(Context context) throws SQLException { CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Site.class); Root siteRoot = criteriaQuery.from(Site.class); criteriaQuery.select(siteRoot); - Site site = uniqueResult(context, criteriaQuery, true, Site.class, -1, -1); + Site site = uniqueResult(context, criteriaQuery, true, Site.class); if (site != null) { cachedSiteId = site.getID(); } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/WorkspaceItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/WorkspaceItemDAOImpl.java index 8e5e80389890..138451365522 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/WorkspaceItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/WorkspaceItemDAOImpl.java @@ -15,7 +15,6 @@ import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Join; import javax.persistence.criteria.Root; import org.dspace.content.Collection; @@ -26,12 +25,10 @@ import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; import org.dspace.eperson.EPerson; -import 
org.dspace.eperson.EPerson_; -import org.dspace.eperson.Group; /** * Hibernate implementation of the Database Access Object interface class for the WorkspaceItem object. - * This class is responsible for all database calls for the WorkspaceItem object and is autowired by spring + * This class is responsible for all database calls for the WorkspaceItem object and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -81,7 +78,7 @@ public WorkspaceItem findByItem(Context context, Item i) throws SQLException { Root workspaceItemRoot = criteriaQuery.from(WorkspaceItem.class); criteriaQuery.select(workspaceItemRoot); criteriaQuery.where(criteriaBuilder.equal(workspaceItemRoot.get(WorkspaceItem_.item), i)); - return uniqueResult(context, criteriaQuery, false, WorkspaceItem.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, WorkspaceItem.class); } @Override @@ -114,33 +111,6 @@ public List findAll(Context context, Integer limit, Integer offse return list(context, criteriaQuery, false, WorkspaceItem.class, limit, offset); } - @Override - public List findWithSupervisedGroup(Context context) throws SQLException { - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, WorkspaceItem.class); - Root workspaceItemRoot = criteriaQuery.from(WorkspaceItem.class); - criteriaQuery.select(workspaceItemRoot); - criteriaQuery.where(criteriaBuilder.isNotEmpty(workspaceItemRoot.get(WorkspaceItem_.supervisorGroups))); - - List orderList = new LinkedList<>(); - orderList.add(criteriaBuilder.asc(workspaceItemRoot.get(WorkspaceItem_.workspaceItemId))); - criteriaQuery.orderBy(orderList); - return list(context, criteriaQuery, false, WorkspaceItem.class, -1, -1); - } - - @Override - public List findBySupervisedGroupMember(Context context, EPerson ePerson) throws SQLException { - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); 
- CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, WorkspaceItem.class); - Root workspaceItemRoot = criteriaQuery.from(WorkspaceItem.class); - Join join = workspaceItemRoot.join("supervisorGroups"); - Join secondJoin = join.join("epeople"); - criteriaQuery.select(workspaceItemRoot); - criteriaQuery.where(criteriaBuilder.equal(secondJoin.get(EPerson_.id), ePerson.getID())); - criteriaQuery.orderBy(criteriaBuilder.asc(workspaceItemRoot.get(WorkspaceItem_.workspaceItemId))); - return list(context, criteriaQuery, false, WorkspaceItem.class, -1, -1); - } - @Override public int countRows(Context context) throws SQLException { return count(createQuery(context, "SELECT count(*) from WorkspaceItem")); diff --git a/dspace-api/src/main/java/org/dspace/content/dao/pojo/ItemUuidAndRelationshipId.java b/dspace-api/src/main/java/org/dspace/content/dao/pojo/ItemUuidAndRelationshipId.java new file mode 100644 index 000000000000..6668b0d211f0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/dao/pojo/ItemUuidAndRelationshipId.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.dao.pojo; + +import java.util.UUID; + +import org.dspace.content.Relationship; +import org.dspace.content.dao.RelationshipDAO; +import org.springframework.lang.NonNull; + +/** + * Used by {@link RelationshipDAO#findByLatestItemAndRelationshipType} to avoid creating {@link Relationship}s. 
+ */ +public class ItemUuidAndRelationshipId { + + private final UUID itemUuid; + private final int relationshipId; + + public ItemUuidAndRelationshipId(@NonNull UUID itemUuid, @NonNull int relationshipId) { + this.itemUuid = itemUuid; + this.relationshipId = relationshipId; + } + + public UUID getItemUuid() { + return this.itemUuid; + } + + public int getRelationshipId() { + return this.relationshipId; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java index 4010e148610c..0b06b34038e1 100644 --- a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java +++ b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactory.java @@ -31,8 +31,8 @@ import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.SiteService; -import org.dspace.content.service.SupervisedItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.eperson.service.SubscribeService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.factory.WorkflowServiceFactory; @@ -71,10 +71,10 @@ public abstract class ContentServiceFactory { public abstract InstallItemService getInstallItemService(); - public abstract SupervisedItemService getSupervisedItemService(); - public abstract SiteService getSiteService(); + public abstract SubscribeService getSubscribeService(); + /** * Return the implementation of the RelationshipTypeService interface * @@ -114,11 +114,7 @@ public InProgressSubmissionService getInProgressSubmissionService(InProgressSubm } public DSpaceObjectService getDSpaceObjectService(T dso) { - // No need to worry when supressing, as long as our "getDSpaceObjectManager" method is properly implemented - // no casting issues should occur - @SuppressWarnings("unchecked") - 
DSpaceObjectService manager = getDSpaceObjectService(dso.getType()); - return manager; + return getDSpaceObjectService(dso.getType()); } @SuppressWarnings("unchecked") diff --git a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java index 6f123ae1bac7..e970f0bdab12 100644 --- a/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/factory/ContentServiceFactoryImpl.java @@ -28,8 +28,8 @@ import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.SiteService; -import org.dspace.content.service.SupervisedItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.eperson.service.SubscribeService; import org.springframework.beans.factory.annotation.Autowired; /** @@ -68,10 +68,9 @@ public class ContentServiceFactoryImpl extends ContentServiceFactory { @Autowired(required = true) private InstallItemService installItemService; @Autowired(required = true) - private SupervisedItemService supervisedItemService; - @Autowired(required = true) private SiteService siteService; - + @Autowired(required = true) + private SubscribeService subscribeService; @Autowired(required = true) private RelationshipService relationshipService; @Autowired(required = true) @@ -149,13 +148,13 @@ public InstallItemService getInstallItemService() { } @Override - public SupervisedItemService getSupervisedItemService() { - return supervisedItemService; + public SiteService getSiteService() { + return siteService; } @Override - public SiteService getSiteService() { - return siteService; + public SubscribeService getSubscribeService() { + return subscribeService ; } @Override diff --git a/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java 
b/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java new file mode 100644 index 000000000000..1ac3930952af --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/DefaultFilter.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * The default filter, a very simple implementation of Filter / LogicalStatement + * The idea is to have this as a wrapper / root class for all logical operations, so it takes a single + * statement as a property (unlike an operator) and takes no parameters (unlike a condition) + * + * @author Kim Shepherd + */ +public class DefaultFilter implements Filter { + private LogicalStatement statement; + private String name; + private final static Logger log = LogManager.getLogger(); + + /** + * Set statement from Spring configuration in item-filters.xml + * Be aware that this is singular not plural. A filter can have one sub-statement only. 
+ * + * @param statement LogicalStatement of this filter (operator, condition, or another filter) + */ + public void setStatement(LogicalStatement statement) { + this.statement = statement; + } + + /** + * Get the result of logical evaluation for an item + * @param context DSpace context + * @param item Item to evaluate + * @return boolean + * @throws LogicalStatementException + */ + public boolean getResult(Context context, Item item) throws LogicalStatementException { + return this.statement.getResult(context, item); + } + + @Override + public void setBeanName(String name) { + log.debug("Initialize bean " + name); + this.name = name; + } + + @Override + public String getName() { + return name; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/Filter.java b/dspace-api/src/main/java/org/dspace/content/logic/Filter.java new file mode 100644 index 000000000000..f789860e7743 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/Filter.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.beans.factory.BeanNameAware; + +/** + * The interface for Filter currently doesn't add anything to LogicalStatement but inherits from it + * just to keep naming / reflection clean, and in case Filters should do anything additional in future. + * We need this as filters have to be specified in the spring configuration (item-filters.xml). + * Filters are the top level elements of the logic. Only logical statements that implement this interface + * are allowed to be the root element of a spring configuration (item-filters.xml) of this logic framework. 
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic;

import java.util.HashMap;
import java.util.Map;

import org.dspace.identifier.DOI;
import org.dspace.identifier.Handle;
import org.dspace.identifier.Identifier;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * General utility methods for logical item filtering
 *
 * @author Kim Shepherd
 */
public class FilterUtils {

    // NOTE(review): this instance field appears unused — every method below is static
    // and resolves the ConfigurationService through DSpaceServicesFactory instead.
    // Kept for backward compatibility; confirm no Spring wiring relies on it before removing.
    @Autowired(required = true)
    ConfigurationService configurationService;

    /**
     * Get a Filter by configuration property name.
     * For example, if a module has implemented a "my-feature.filter" configuration property,
     * this method will return the filter whose bean ID is the value of that property.
     *
     * @param property DSpace configuration property name (Apache Commons config)
     * @return Filter object with the bean ID configured for this property key, or null if unset
     */
    public static Filter getFilterFromConfiguration(String property) {
        String filterName = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty(property);
        if (filterName != null) {
            return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName(filterName, Filter.class);
        }
        return null;
    }

    /**
     * Get a Filter by configuration property name, falling back to a supplied default.
     *
     * @param property      DSpace configuration property name (Apache Commons config)
     * @param defaultFilter filter to return when the property is unset or names no bean
     * @return Filter object with the bean ID configured for this property key, or defaultFilter
     */
    public static Filter getFilterFromConfiguration(String property, Filter defaultFilter) {
        Filter filter = getFilterFromConfiguration(property);
        if (filter != null) {
            return filter;
        }
        return defaultFilter;
    }

    /**
     * Get a map of identifier types and filters to use when creating workspace or archived items.
     * This is used by services installing new archived or workspace items to filter by identifier type,
     * as some filters should apply to DOI creation but not Handle creation, and so on.
     * The in-progress or archived status selects which filter is loaded from configuration.
     *
     * @param inProgress true for workspace (in-progress) items, false for installed (archived) items
     * @return map of identifier class to the filter governing creation of that identifier type
     */
    public static Map<Class<? extends Identifier>, Filter> getIdentifierFilters(boolean inProgress) {
        String configurationSuffix = "install";
        if (inProgress) {
            configurationSuffix = "workspace";
        }
        Map<Class<? extends Identifier>, Filter> filters = new HashMap<>();
        // Put DOI 'can we create DOI on install / workspace?' filter
        Filter filter = FilterUtils.getFilterFromConfiguration("identifiers.submission.filter." + configurationSuffix);
        // A null filter should be handled safely by the identifier provider (default, or "always true")
        filters.put(DOI.class, filter);
        // This won't have an effect until handle providers implement filtering, but is an example of
        // how the filters can be used for other types
        filters.put(Handle.class, DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName(
            "always_true_filter", TrueFilter.class));
        return filters;
    }
}
+ * + * @author Kim Shepherd + */ +public interface LogicalStatement { + /** + * Get the result of logical evaluation for an item + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + boolean getResult(Context context, Item item) throws LogicalStatementException; +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/LogicalStatementException.java b/dspace-api/src/main/java/org/dspace/content/logic/LogicalStatementException.java new file mode 100644 index 000000000000..4e3b3e3b7d78 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/LogicalStatementException.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic; + +/** + * Exception for errors encountered while evaluating logical statements + * defined as spring beans. 
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic;

import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.logging.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;

/**
 * A command-line runner used for testing a logical filter against an item, or all items
 *
 * @author Kim Shepherd
 */
public class TestLogicRunner {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(TestLogicRunner.class);

    /**
     * Default constructor
     */
    private TestLogicRunner() { }

    /**
     * Main runner method for CLI usage
     * @param argv array of command-line arguments
     */
    public static void main(String[] argv) {
        System.out.println("Starting impl of main() test spring logic item filter");

        // initialize options
        Options options = new Options();

        options.addOption("h", "help", false, "Help");
        options.addOption("l", "list", false, "List filters");
        options.addOption("f", "filter", true, "Use filter ");
        options.addOption("i", "item", true, "Run filter over item ");
        options.addOption("a", "all", false, "Run filter over all items");

        // initialize parser
        // NOTE(review): PosixParser is deprecated since commons-cli 1.3 (DefaultParser is
        // the replacement); kept here to avoid changing this file's imports.
        CommandLineParser parser = new PosixParser();
        CommandLine line = null;
        HelpFormatter helpformater = new HelpFormatter();

        try {
            line = parser.parse(options, argv);
        } catch (ParseException ex) {
            System.out.println(ex.getMessage());
            System.exit(1);
        }

        if (line.hasOption("help")) {
            helpformater.printHelp("\nTest the DSpace logical item filters\n", options);
            System.exit(0);
        }

        // Create a read-only context: this runner only evaluates filters, it never writes
        Context c = new Context(Context.Mode.READ_ONLY);
        //c.turnOffAuthorisationSystem();
        ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();

        if (line.hasOption("list")) {
            // List filters and exit
            List<Filter> filters = manager.getServicesByType(Filter.class);
            for (Filter filter : filters) {
                System.out.println(filter.getClass().toString());
            }
            System.out.println("See item-filters.xml spring config for filter names");
            System.exit(0);
        }

        Filter filter;

        if (line.hasOption("filter")) {
            String filterName = line.getOptionValue("filter");
            filter = manager.getServiceByName(filterName, Filter.class);
            if (filter == null) {
                System.out.println("Error loading filter: " + filterName);
                System.exit(1);
            }

            if (line.hasOption("item")) {
                String handle = line.getOptionValue("item");

                HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
                try {
                    DSpaceObject dso = handleService.resolveToObject(c, handle);
                    // FIX: resolveToObject returns null for an unknown handle; previously dso
                    // was dereferenced unconditionally, throwing NullPointerException
                    if (dso == null) {
                        System.out.println("Could not resolve handle to an object: " + handle);
                    } else if (dso.getType() == Constants.ITEM) {
                        // Direct constant comparison instead of the equivalent (but fragile)
                        // Constants.typeText[dso.getType()].equals("ITEM") string lookup
                        Item item = (Item) dso;
                        System.out.println(filter.getResult(c, item));
                    } else {
                        System.out.println(handle + " is not an ITEM");
                    }
                } catch (SQLException | LogicalStatementException e) {
                    System.out.println("Error encountered processing item " + handle + ": " + e.getMessage());
                }

            } else if (line.hasOption("all")) {
                ItemService itemService = ContentServiceFactory.getInstance().getItemService();
                try {
                    Iterator<Item> itemIterator = itemService.findAll(c);
                    while (itemIterator.hasNext()) {
                        Item i = itemIterator.next();
                        System.out.println(
                            "Testing '" + filter + "' on item " + i.getHandle() + " ('" + i.getName() + "')"
                        );
                        System.out.println(filter.getResult(c, i));
                    }
                } catch (SQLException | LogicalStatementException e) {
                    System.out.println("Error encountered processing items: " + e.getMessage());
                }
            } else {
                helpformater.printHelp("\nTest the DSpace logical item filters\n", options);
            }
        }
    }
}
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.util.Map;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Abstract class for conditions, to implement the basic getter and setter parameters
 *
 * @author Kim Shepherd
 */
public abstract class AbstractCondition implements Condition {

    // Parameters map (injected, required -- see setter annotation)
    private Map parameters;

    // Declare and instantiate spring services
    @Autowired(required = true)
    protected ItemService itemService;
    @Autowired(required = true)
    protected CollectionService collectionService;
    @Autowired(required = true)
    protected HandleService handleService;

    // Logging
    Logger log = LogManager.getLogger(AbstractCondition.class);

    /**
     * Get parameters set by spring configuration in item-filters.xml
     * These could be any kind of map that the extending condition class needs for evaluation
     * @return map of parameters
     * @throws LogicalStatementException
     */
    @Override
    public Map getParameters() throws LogicalStatementException {
        return this.parameters;
    }

    /**
     * Set parameters - used by Spring when creating beans from item-filters.xml
     * These could be any kind of map that the extending condition class needs for evaluation
     * @param parameters
     * @throws LogicalStatementException
     */
    @Autowired(required = true)
    @Override
    public void setParameters(Map parameters) throws LogicalStatementException {
        this.parameters = parameters;
    }

    /**
     * Base argument validation for condition evaluation. This implementation only
     * checks its inputs: a null item logs an error and yields false, while a null
     * context throws IllegalStateException; otherwise it returns true. Subclasses
     * override this with real logic (and may call super.getResult first to reuse
     * these null checks — see e.g. BitstreamCountCondition).
     *
     * @param context DSpace context
     * @param item Item to evaluate
     * @return false if item is null, true otherwise
     * @throws LogicalStatementException
     * @throws IllegalStateException if context is null
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {
        if (item == null) {
            log.error("Error evaluating item. Passed item is null, returning false");
            return false;
        }
        if (context == null) {
            throw new IllegalStateException("Context is null");
        }
        return true;
    }

    /**
     * Setter used to override the autowired ItemService (e.g. from tests or
     * explicit Spring configuration).
     */
    @Override
    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }
}
Integer.parseInt((String)getParameters().get("max")); + } + String bundleName = (String)getParameters().get("bundle"); + if (min < 0 && max < 0) { + throw new LogicalStatementException("Either min or max parameter must be 0 or bigger."); + } + + List bundles; + int count = 0; + + if (bundleName != null) { + bundles = item.getBundles(bundleName); + } else { + bundles = item.getBundles(); + } + + for (Bundle bundle : bundles) { + count += bundle.getBitstreams().size(); + } + + if (min < 0) { + return (count <= max); + } + if (max < 0) { + return (count >= min); + } + return (count <= max && count >= min); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/Condition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/Condition.java new file mode 100644 index 000000000000..7647dce4a4a4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/Condition.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.condition; + +import java.util.Map; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; + +/** + * The Condition interface + * + * A condition is one logical statement testing an item for any idea. A condition is always a logical statements. An + * operator is not a condition but also a logical statement. 
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.content.logic.condition;

import java.sql.SQLException;
import java.util.List;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.logic.LogicalStatementException;
import org.dspace.core.Context;

/**
 * A condition that accepts a list of collection handles and returns true
 * if the item belongs to any of them.
 *
 * @author Kim Shepherd
 */
public class InCollectionCondition extends AbstractCondition {
    private static Logger log = LogManager.getLogger(InCollectionCondition.class);

    /**
     * Return true if item is in one of the specified collections
     * Return false if not
     * @param context DSpace context
     * @param item Item to evaluate
     * @return boolean result of evaluation
     * @throws LogicalStatementException if the "collections" parameter is missing,
     *         or a database error occurs while resolving the item's parent
     */
    @Override
    public boolean getResult(Context context, Item item) throws LogicalStatementException {

        @SuppressWarnings("unchecked")
        List<String> collectionHandles = (List<String>) getParameters().get("collections");
        // FIX: a missing "collections" parameter previously caused a raw NullPointerException
        // in the loop below; fail with an explicit, catchable exception instead
        if (collectionHandles == null) {
            throw new LogicalStatementException(
                "InCollectionCondition requires a 'collections' parameter (list of collection handles)");
        }

        // Look for the handle among an archived item's collections - this test will only work after submission
        // and archival is complete
        List<Collection> itemCollections = item.getCollections();
        for (Collection collection : itemCollections) {
            if (collectionHandles.contains(collection.getHandle())) {
                log.debug("item " + item.getHandle() + " is in collection "
                    + collection.getHandle() + ", returning true");
                return true;
            }
        }

        // Look for the parent object of the item. This is important as the item.getOwningCollection method
        // may return null, even though the item itself does have a parent object, at the point of archival
        try {
            DSpaceObject parent = itemService.getParentObject(context, item);
            if (parent != null) {
                log.debug("Got parent DSO for item: " + parent.getID().toString());
                log.debug("Parent DSO handle: " + parent.getHandle());
                if (collectionHandles.contains(parent.getHandle())) {
                    log.debug("item " + item.getHandle() + " is in collection "
                        + parent.getHandle() + ", returning true");
                    return true;
                }
            } else {
                log.debug("Parent DSO is null...");
            }
        } catch (SQLException e) {
            log.error("Error obtaining parent DSO", e);
            throw new LogicalStatementException(e);
        }

        // If we reach this statement, the item did not appear in any of the collections from the parameters
        log.debug("item " + item.getHandle() + " not found in the passed collection handle list");

        return false;
    }
}
A condition that accepts a list of community handles and returns true + * if the item belongs to any of them. + * + * @author Kim Shepherd + */ +public class InCommunityCondition extends AbstractCondition { + private final static Logger log = LogManager.getLogger(); + + /** + * Return true if item is in one of the specified collections + * Return false if not + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + + List communityHandles = (List)getParameters().get("communities"); + List itemCollections = item.getCollections(); + + // Check communities of item.getCollections() - this will only see collections if the item is archived + for (Collection collection : itemCollections) { + try { + List communities = collection.getCommunities(); + for (Community community : communities) { + if (communityHandles.contains(community.getHandle())) { + return true; + } + } + } catch (SQLException e) { + log.error(e.getMessage()); + throw new LogicalStatementException(e); + } + } + + // Look for the parent object of the item. 
This is important as the item.getOwningCollection method + // may return null, even though the item itself does have a parent object, at the point of archival + try { + DSpaceObject parent = itemService.getParentObject(context, item); + if (parent instanceof Collection) { + log.debug("Got parent DSO for item: " + parent.getID().toString()); + log.debug("Parent DSO handle: " + parent.getHandle()); + try { + // Now iterate communities of this parent collection + Collection collection = (Collection)parent; + List communities = collection.getCommunities(); + for (Community community : communities) { + if (communityHandles.contains(community.getHandle())) { + return true; + } + } + } catch (SQLException e) { + log.error(e.getMessage()); + throw new LogicalStatementException(e); + } + } else { + log.debug("Parent DSO is null or is not a Collection..."); + } + } catch (SQLException e) { + log.error("Error obtaining parent DSO", e); + throw new LogicalStatementException(e); + } + + return false; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/IsArchivedCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/IsArchivedCondition.java new file mode 100644 index 000000000000..4f50d2b6f69f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/IsArchivedCondition.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.condition; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * A condition that returns true if the item is archived + * + * @author Kim Shepherd + */ +public class IsArchivedCondition extends 
AbstractCondition { + private final static Logger log = LogManager.getLogger(); + + /** + * Return true if item is archived + * Return false if not + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + log.debug("Result of isArchived is " + item.isArchived()); + return item.isArchived(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java new file mode 100644 index 000000000000..850b69bda0d8 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/IsWithdrawnCondition.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.condition; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * A condition that returns true if the item is withdrawn + * + * @author Kim Shepherd + */ +public class IsWithdrawnCondition extends AbstractCondition { + private final static Logger log = LogManager.getLogger(); + + /** + * Return true if item is withdrawn + * Return false if not + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + log.debug("Result of isWithdrawn is " + item.isWithdrawn()); + return item.isWithdrawn(); + } +} diff --git 
a/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java new file mode 100644 index 000000000000..e87c479de6b5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValueMatchCondition.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.condition; + +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * A condition that returns true if a pattern (regex) matches any value + * in a given metadata field + * + * @author Kim Shepherd + */ +public class MetadataValueMatchCondition extends AbstractCondition { + + private final static Logger log = LogManager.getLogger(); + + /** + * Return true if any value for a specified field in the item matches a specified regex pattern + * Return false if not + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + String field = (String)getParameters().get("field"); + if (field == null) { + return false; + } + + String[] fieldParts = field.split("\\."); + String schema = (fieldParts.length > 0 ? fieldParts[0] : null); + String element = (fieldParts.length > 1 ? fieldParts[1] : null); + String qualifier = (fieldParts.length > 2 ? 
fieldParts[2] : null); + + List values = itemService.getMetadata(item, schema, element, qualifier, Item.ANY); + for (MetadataValue value : values) { + if (getParameters().get("pattern") instanceof String) { + String pattern = (String)getParameters().get("pattern"); + log.debug("logic for " + item.getHandle() + ": pattern passed is " + pattern + + ", checking value " + value.getValue()); + Pattern p = Pattern.compile(pattern); + Matcher m = p.matcher(value.getValue()); + if (m.find()) { + return true; + } + } + } + return false; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java new file mode 100644 index 000000000000..c6ca9dfb9fa3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/MetadataValuesMatchCondition.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.condition; + +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * A condition that returns true if any pattern in a list of patterns matches any value + * in a given metadata field + * + * @author Kim Shepherd + */ +public class MetadataValuesMatchCondition extends AbstractCondition { + + private final static Logger log = LogManager.getLogger(); + + /** + * Return true if any value for a specified field in the item matches any of the specified regex patterns + * Return false if not + * @param context 
DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + String field = (String)getParameters().get("field"); + if (field == null) { + return false; + } + + String[] fieldParts = field.split("\\."); + String schema = (fieldParts.length > 0 ? fieldParts[0] : null); + String element = (fieldParts.length > 1 ? fieldParts[1] : null); + String qualifier = (fieldParts.length > 2 ? fieldParts[2] : null); + + List values = itemService.getMetadata(item, schema, element, qualifier, Item.ANY); + for (MetadataValue value : values) { + if (getParameters().get("patterns") instanceof List) { + List patternList = (List)getParameters().get("patterns"); + // If the list is empty, just return true and log error? + log.error("No patterns were passed for metadata value matching, defaulting to 'true'"); + if (patternList == null) { + return true; + } + for (String pattern : patternList) { + log.debug("logic for " + item.getHandle() + ": pattern passed is " + pattern + + ", checking value " + value.getValue()); + Pattern p = Pattern.compile(pattern); + Matcher m = p.matcher(value.getValue()); + if (m.find()) { + return true; + } + } + } + } + return false; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java b/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java new file mode 100644 index 000000000000..20138beb47ef --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/condition/ReadableByGroupCondition.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.condition; + +import 
java.sql.SQLException; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Constants; +import org.dspace.core.Context; + +/** + * A condition that accepts a group and action parameter and returns true if the group + * can perform the action on a given item + * + * @author Kim Shepherd + */ +public class ReadableByGroupCondition extends AbstractCondition { + private final static Logger log = LogManager.getLogger(); + + // Authorize service + AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + /** + * Return true if this item allows a specified action (eg READ, WRITE, ADD) by a specified group + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + + String group = (String)getParameters().get("group"); + String action = (String)getParameters().get("action"); + + try { + List policies = authorizeService + .getPoliciesActionFilter(context, item, Constants.getActionID(action)); + for (ResourcePolicy policy : policies) { + if (policy.getGroup().getName().equals(group)) { + return true; + } + } + } catch (SQLException e) { + log.error("Error trying to read policies for " + item.getHandle() + ": " + e.getMessage()); + throw new LogicalStatementException(e); + } + log.debug("item " + item.getHandle() + " not readable by anonymous group"); + + return false; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/operator/AbstractOperator.java 
b/dspace-api/src/main/java/org/dspace/content/logic/operator/AbstractOperator.java new file mode 100644 index 000000000000..3882414def9b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/operator/AbstractOperator.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.operator; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * Abstract class for an operator. + * An operator contains a list of logical statements (conditions or more operators) and depending on the kind + * of operator (AND, OR, NOT, etc.) the results of some or all sub-statements are evaluated and returned + * as a logical result + * + * @author Kim Shepherd + */ +public abstract class AbstractOperator implements LogicalStatement { + + private List statements = new ArrayList<>(); + + /** + * Get sub-statements for this operator + * @return list of sub-statements + */ + public List getStatements() { + return statements; + } + + /** + * Set sub-statements for this operator, as defined in item-filters.xml + * @param statements list of logical statements + */ + public void setStatements(List statements) { + this.statements = statements; + } + + /** + * Default constructor + */ + public AbstractOperator() {} + + /** + * Constructor to create operator from some predefined statements + * @param statements + */ + public AbstractOperator(List statements) { + this.statements = statements; + } + + /** + * + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of evaluation (of sub-statements) + * @throws LogicalStatementException + */ + 
@Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + return false; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/operator/And.java b/dspace-api/src/main/java/org/dspace/content/logic/operator/And.java new file mode 100644 index 000000000000..79bc5c381e4f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/operator/And.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.operator; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * An operator that implements AND by evaluating sub-statements and only returning + * true if all sub-statements return true + * + * @author Kim Shepherd + */ +public class And extends AbstractOperator { + + /** + * Default constructor + */ + public And() { + super(); + } + + /** + * Constructor that accepts predefined list of statements as defined in item-filters.xml + * @param statements List of logical statements + */ + And(List statements) { + super(statements); + } + + /** + * Return true if ALL statements return true + * Return false otherwise + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of AND + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + + for (LogicalStatement statement : getStatements()) { + if (!statement.getResult(context, item)) { + return false; + } + } + + return true; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/operator/Nand.java 
b/dspace-api/src/main/java/org/dspace/content/logic/operator/Nand.java new file mode 100644 index 000000000000..2a4b6823b6c9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/operator/Nand.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.operator; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * An operator that implements NAND by negating an AND operation + * + * @author Kim Shepherd + */ +public class Nand extends AbstractOperator { + + /** + * Default constructor + */ + public Nand() { + super(); + } + + /** + * Constructor that accepts predefined list of statements as defined in item-filters.xml + * @param statements List of logical statements + */ + public Nand(List statements) { + super(statements); + } + + /** + * Return true if the result of AND'ing all sub-statements is false (ie. 
a NOT(AND()) + * Return false if not + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of NAND + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + return !(new And(getStatements()).getResult(context, item)); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/operator/Nor.java b/dspace-api/src/main/java/org/dspace/content/logic/operator/Nor.java new file mode 100644 index 000000000000..d312734fbf46 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/operator/Nor.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.operator; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * An operator that implements NOR by negating an OR operation. 
+ * + * @author Kim Shepherd + */ +public class Nor extends AbstractOperator { + + /** + * Default constructor + */ + public Nor() { + super(); + } + + /** + * Constructor that accepts predefined list of statements as defined in item-filters.xml + * @param statements List of logical statements + */ + public Nor(List statements) { + super(statements); + } + + /** + * Return true if the result of OR'ing the sub-statements is false + * Return false otherwise + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of NOR + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + return !(new Or(getStatements()).getResult(context, item)); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/operator/Not.java b/dspace-api/src/main/java/org/dspace/content/logic/operator/Not.java new file mode 100644 index 000000000000..277acdfd0153 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/operator/Not.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.operator; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * An operator that implements NOT by simply negating a statement + * Note that this operator doesn't actually implement the 'AbstractOperator' interface because + * we only want one sub-statement. So it's actually just a simple implementation of LogicalStatement. + * Not can have one sub-statement only, while and, or, nor, ... can have multiple sub-statements. 
+ * + * @author Kim Shepherd + */ +public class Not implements LogicalStatement { + + private LogicalStatement statement; + + /** + * Get sub-statement (note: singular! even though we keep the method name) for this operator + * @return list of sub-statements + */ + public LogicalStatement getStatements() { + return statement; + } + + /** + * Set sub-statement (note: singular!) for this operator, as defined in item-filters.xml + * @param statement a single statement to apply to NOT operation + */ + public void setStatements(LogicalStatement statement) { + this.statement = statement; + } + + /** + * Default constructor + */ + public Not() {} + + /** + * Constructor that accepts predefined list of statements as defined in item-filters.xml + * @param statement Single logical statement + */ + public Not(LogicalStatement statement) { + this.statement = statement; + } + + /** + * Return true if the result of the sub-statement is false + * Return false otherwise + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of NOT + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + return !statement.getResult(context, item); + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/logic/operator/Or.java b/dspace-api/src/main/java/org/dspace/content/logic/operator/Or.java new file mode 100644 index 000000000000..e5697f8cc34c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/logic/operator/Or.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic.operator; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.logic.LogicalStatement; +import 
org.dspace.content.logic.LogicalStatementException; +import org.dspace.core.Context; + +/** + * An operator that implements OR by evaluating sub-statements and returns + * true if one or more sub-statements return true + * + * @author Kim Shepherd + */ +public class Or extends AbstractOperator { + + /** + * Default constructor + */ + public Or() { + super(); + } + + /** + * Constructor that accepts predefined list of statements as defined in item-filters.xml + * @param statements List of logical statements + */ + public Or(List statements) { + super(statements); + } + + /** + * Return true if any sub-statement returns true + * Return false otherwise + * @param context DSpace context + * @param item Item to evaluate + * @return boolean result of OR + * @throws LogicalStatementException + */ + @Override + public boolean getResult(Context context, Item item) throws LogicalStatementException { + + for (LogicalStatement statement : getStatements()) { + if (statement.getResult(context, item)) { + return true; + } + } + + return false; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java index 97d674fd1260..685fd9000da8 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSDisseminator.java @@ -14,9 +14,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; -import java.net.URLEncoder; import java.sql.SQLException; import java.util.HashMap; import java.util.Iterator; @@ -75,7 +73,7 @@ import org.dspace.content.service.SiteService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.Utils; 
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.service.PluginService; @@ -83,10 +81,10 @@ import org.dspace.license.service.CreativeCommonsService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; /** * Base class for disseminator of @@ -265,7 +263,7 @@ public void disseminate(Context context, DSpaceObject dso, } //end if/else // Assuming no errors, log this dissemination - log.info(LogManager.getHeader(context, "package_disseminate", + log.info(LogHelper.getHeader(context, "package_disseminate", "Disseminated package file=" + pkgFile.getName() + " for Object, type=" + Constants.typeText[dso.getType()] + ", handle=" @@ -328,45 +326,43 @@ protected void writeZipPackage(Context context, DSpaceObject dso, Mets manifest = makeManifest(context, dso, params, extraStreams); // copy extra (metadata, license, etc) bitstreams into zip, update manifest - if (extraStreams != null) { - for (Map.Entry ment : extraStreams.getMap().entrySet()) { - MdRef ref = ment.getKey(); - - // Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be - // included in our Package (if their bundles are already included in the section of manifest). - // So, do a special check to see if we need to link up extra License entries to the bitstream - // in the . - // (this ensures that we don't accidentally add the same License file to our package twice) - linkLicenseRefsToBitstreams(context, params, dso, ref); - - //If this 'mdRef' is NOT already linked up to a file in the package, - // then its file must be missing. So, we are going to add a new - // file to the Zip package. 
- if (ref.getXlinkHref() == null || ref.getXlinkHref().isEmpty()) { - InputStream is = ment.getValue(); - - // create a hopefully unique filename within the Zip - String fname = gensym("metadata"); - // link up this 'mdRef' to point to that file - ref.setXlinkHref(fname); - if (log.isDebugEnabled()) { - log.debug("Writing EXTRA stream to Zip: " + fname); - } - //actually add the file to the Zip package - ZipEntry ze = new ZipEntry(fname); - if (lmTime != 0) { - ze.setTime(lmTime); - } else { - // Set a default modified date so that checksum of Zip doesn't change if Zip contents are - // unchanged - ze.setTime(DEFAULT_MODIFIED_DATE); - } - zip.putNextEntry(ze); - Utils.copy(is, zip); - zip.closeEntry(); - - is.close(); + for (Map.Entry ment : extraStreams.getMap().entrySet()) { + MdRef ref = ment.getKey(); + + // Both Deposit Licenses & CC Licenses which are referenced as "extra streams" may already be + // included in our Package (if their bundles are already included in the section of manifest). + // So, do a special check to see if we need to link up extra License entries to the bitstream + // in the . + // (this ensures that we don't accidentally add the same License file to our package twice) + linkLicenseRefsToBitstreams(context, params, dso, ref); + + //If this 'mdRef' is NOT already linked up to a file in the package, + // then its file must be missing. So, we are going to add a new + // file to the Zip package. 
+ if (ref.getXlinkHref() == null || ref.getXlinkHref().isEmpty()) { + InputStream is = ment.getValue(); + + // create a hopefully unique filename within the Zip + String fname = gensym("metadata"); + // link up this 'mdRef' to point to that file + ref.setXlinkHref(fname); + if (log.isDebugEnabled()) { + log.debug("Writing EXTRA stream to Zip: " + fname); + } + //actually add the file to the Zip package + ZipEntry ze = new ZipEntry(fname); + if (lmTime != 0) { + ze.setTime(lmTime); + } else { + // Set a default modified date so that checksum of Zip doesn't change if Zip contents are + // unchanged + ze.setTime(DEFAULT_MODIFIED_DATE); } + zip.putNextEntry(ze); + Utils.copy(is, zip); + zip.closeEntry(); + + is.close(); } } @@ -467,17 +463,17 @@ protected void addBitstreamsToZip(Context context, DSpaceObject dso, Utils.copy(input, zip); input.close(); } else { - log.warn("Adding zero-length file for Bitstream, SID=" - + String.valueOf(bitstream.getSequenceID()) + log.warn("Adding zero-length file for Bitstream, uuid=" + + String.valueOf(bitstream.getID()) + ", not authorized for READ."); } zip.closeEntry(); } else if (unauth != null && unauth.equalsIgnoreCase("skip")) { - log.warn("Skipping Bitstream, SID=" + String - .valueOf(bitstream.getSequenceID()) + ", not authorized for READ."); + log.warn("Skipping Bitstream, uuid=" + String + .valueOf(bitstream.getID()) + ", not authorized for READ."); } else { throw new AuthorizeException( - "Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID())); + "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID())); } } } @@ -777,9 +773,6 @@ protected Mets makeManifest(Context context, DSpaceObject dso, Mets mets = new Mets(); String identifier = "DB-ID-" + dso.getID(); - if (dso.getHandle() != null) { - identifier = dso.getHandle().replace('/', '-'); - } // this ID should be globally unique (format: DSpace_[objType]_[handle with slash replaced with a dash]) mets.setID("DSpace_" + 
Constants.typeText[dso.getType()] + "_" + identifier); @@ -901,12 +894,12 @@ protected Mets makeManifest(Context context, DSpaceObject dso, continue; } else if (!(unauth != null && unauth.equalsIgnoreCase("zero"))) { throw new AuthorizeException( - "Not authorized to read Bitstream, SID=" + String.valueOf(bitstream.getSequenceID())); + "Not authorized to read Bitstream, uuid=" + String.valueOf(bitstream.getID())); } } - String sid = String.valueOf(bitstream.getSequenceID()); - String fileID = bitstreamIDstart + sid; + String uuid = String.valueOf(bitstream.getID()); + String fileID = bitstreamIDstart + uuid; edu.harvard.hul.ois.mets.File file = new edu.harvard.hul.ois.mets.File(); file.setID(fileID); file.setSEQ(bitstream.getSequenceID()); @@ -929,7 +922,7 @@ protected Mets makeManifest(Context context, DSpaceObject dso, * extracted text or a thumbnail, so we use the name to work * out which bitstream to be in the same group as */ - String groupID = "GROUP_" + bitstreamIDstart + sid; + String groupID = "GROUP_" + bitstreamIDstart + uuid; if ((bundle.getName() != null) && (bundle.getName().equals("THUMBNAIL") || bundle.getName().startsWith("TEXT"))) { @@ -939,7 +932,7 @@ protected Mets makeManifest(Context context, DSpaceObject dso, bitstream); if (original != null) { groupID = "GROUP_" + bitstreamIDstart - + original.getSequenceID(); + + String.valueOf(original.getID()); } } file.setGROUPID(groupID); @@ -1408,7 +1401,7 @@ public String makeBitstreamURL(Context context, Bitstream bitstream, PackagePara // if bare manifest, use external "persistent" URI for bitstreams if (params != null && (params.getBooleanProperty("manifestOnly", false))) { // Try to build a persistent(-ish) URI for bitstream - // Format: {site-base-url}/bitstream/{item-handle}/{sequence-id}/{bitstream-name} + // Format: {site-ui-url}/bitstreams/{bitstream-uuid} try { // get handle of parent Item of this bitstream, if there is one: String handle = null; @@ -1419,26 +1412,13 @@ public String 
makeBitstreamURL(Context context, Bitstream bitstream, PackagePara handle = bi.get(0).getHandle(); } } - if (handle != null) { - return configurationService - .getProperty("dspace.ui.url") - + "/bitstream/" - + handle - + "/" - + String.valueOf(bitstream.getSequenceID()) - + "/" - + URLEncoder.encode(bitstream.getName(), "UTF-8"); - } else { //no Handle assigned, so persistent(-ish) URI for bitstream is - // Format: {site-base-url}/retrieve/{bitstream-internal-id} - return configurationService - .getProperty("dspace.ui.url") - + "/retrieve/" - + String.valueOf(bitstream.getID()); - } + return configurationService + .getProperty("dspace.ui.url") + + "/bitstreams/" + + String.valueOf(bitstream.getID()) + + "/download"; } catch (SQLException e) { log.error("Database problem", e); - } catch (UnsupportedEncodingException e) { - log.error("Unknown character set", e); } // We should only get here if we failed to build a nice URL above diff --git a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java index 0db9a98b9a31..98277c4f9c06 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/AbstractMETSIngester.java @@ -16,6 +16,7 @@ import java.sql.SQLException; import java.util.Iterator; import java.util.List; +import java.util.UUID; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; @@ -43,14 +44,14 @@ import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowException; import 
org.dspace.workflow.factory.WorkflowServiceFactory; -import org.jdom.Element; +import org.jdom2.Element; /** * Base class for package ingester of METS (Metadata Encoding and Transmission @@ -210,7 +211,7 @@ public DSpaceObject ingest(Context context, DSpaceObject parent, DSpaceObject dso = null; try { - log.info(LogManager.getHeader(context, "package_parse", + log.info(LogHelper.getHeader(context, "package_parse", "Parsing package for ingest, file=" + pkgFile.getName())); // Parse our ingest package, extracting out the METS manifest in the @@ -257,7 +258,7 @@ public DSpaceObject ingest(Context context, DSpaceObject parent, if (params.restoreModeEnabled()) { action = "package_restore"; } - log.info(LogManager.getHeader(context, action, + log.info(LogHelper.getHeader(context, action, "Created new Object, type=" + Constants.typeText[dso.getType()] + ", handle=" + dso.getHandle() + ", dbID=" @@ -387,7 +388,7 @@ protected DSpaceObject ingestObject(Context context, DSpaceObject parent, //If user specified to skip item ingest if any "missing parent" error message occur if (params.getBooleanProperty("skipIfParentMissing", false)) { //log a warning instead of throwing an error - log.warn(LogManager.getHeader(context, "package_ingest", + log.warn(LogHelper.getHeader(context, "package_ingest", "SKIPPING ingest of object '" + manifest.getObjID() + "' as parent DSpace Object could not be found. 
" + "If you are running a recursive ingest, it is likely this " + @@ -409,6 +410,7 @@ protected DSpaceObject ingestObject(Context context, DSpaceObject parent, // get handle from manifest handle = getObjectHandle(manifest); } + UUID uuid = getObjectID(manifest); // -- Step 2 -- // Create our DSpace Object based on info parsed from manifest, and @@ -416,7 +418,7 @@ protected DSpaceObject ingestObject(Context context, DSpaceObject parent, DSpaceObject dso; try { dso = PackageUtils.createDSpaceObject(context, parent, - type, handle, params); + type, handle, uuid, params); } catch (SQLException sqle) { throw new PackageValidationException("Exception while ingesting " + pkgFile.getPath(), sqle); @@ -727,7 +729,6 @@ protected void addBitstreams(Context context, Item item, // retrieve path/name of file in manifest String path = METSManifest.getFileName(mfile); - // extract the file input stream from package (or retrieve // externally, if it is an externally referenced file) InputStream fileStream = getFileInputStream(pkgFile, params, path); @@ -1025,7 +1026,7 @@ public DSpaceObject replace(Context context, DSpaceObject dsoToReplace, DSpaceObject dso = null; try { - log.info(LogManager.getHeader(context, "package_parse", + log.info(LogHelper.getHeader(context, "package_parse", "Parsing package for replace, file=" + pkgFile.getName())); // Parse our ingest package, extracting out the METS manifest in the @@ -1077,7 +1078,7 @@ public DSpaceObject replace(Context context, DSpaceObject dsoToReplace, //if ingestion was successful if (dso != null) { // Log that we created an object - log.info(LogManager.getHeader(context, "package_replace", + log.info(LogHelper.getHeader(context, "package_replace", "Created new Object, type=" + Constants.typeText[dso.getType()] + ", handle=" + dso.getHandle() + ", dbID=" @@ -1093,7 +1094,7 @@ public DSpaceObject replace(Context context, DSpaceObject dsoToReplace, params, null); // Log that we replaced an object - 
log.info(LogManager.getHeader(context, "package_replace", + log.info(LogHelper.getHeader(context, "package_replace", "Replaced Object, type=" + Constants.typeText[dso.getType()] + ", handle=" + dso.getHandle() + ", dbID=" @@ -1506,4 +1507,22 @@ public abstract void finishBitstream(Context context, Bitstream bs, */ public abstract String getConfigurationName(); + public UUID getObjectID(METSManifest manifest) + throws PackageValidationException { + Element mets = manifest.getMets(); + String idStr = mets.getAttributeValue("ID"); + if (idStr == null || idStr.length() == 0) { + throw new PackageValidationException("Manifest is missing the required mets@ID attribute."); + } + if (idStr.contains("DB-ID-")) { + idStr = idStr.substring(idStr.lastIndexOf("DB-ID-") + 6, idStr.length()); + } + try { + return UUID.fromString(idStr); + } catch (IllegalArgumentException ignored) { + //do nothing + } + return null; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/packager/AbstractPackageIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/AbstractPackageIngester.java index da58f1cf0bad..f66b2f2b7672 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/AbstractPackageIngester.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/AbstractPackageIngester.java @@ -27,7 +27,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; import org.dspace.workflow.WorkflowException; @@ -147,7 +147,7 @@ public List ingestAll(Context context, DSpaceObject parent, File pkgFile //if we are skipping over (i.e. 
keeping) existing objects if (params.keepExistingModeEnabled()) { - log.warn(LogManager.getHeader(context, "skip_package_ingest", + log.warn(LogHelper.getHeader(context, "skip_package_ingest", "Object already exists, package-skipped=" + pkgFile.getName())); } else { // Pass this exception on -- which essentially causes a full rollback of all changes (this is @@ -156,7 +156,7 @@ public List ingestAll(Context context, DSpaceObject parent, File pkgFile } } } else { - log.info(LogManager.getHeader(context, "skip_package_ingest", + log.info(LogHelper.getHeader(context, "skip_package_ingest", "Object was already ingested, package-skipped=" + pkgFile.getName())); } @@ -274,7 +274,7 @@ public List replaceAll(Context context, DSpaceObject dso, // the object to be replaced from the package itself. replacedDso = replace(context, dso, pkgFile, params); } else { - log.info(LogManager.getHeader(context, "skip_package_replace", + log.info(LogHelper.getHeader(context, "skip_package_replace", "Object was already replaced, package-skipped=" + pkgFile.getName())); } diff --git a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java index 954a68bfc166..e7be7ab51190 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceAIPIngester.java @@ -20,7 +20,7 @@ import org.dspace.content.crosswalk.MetadataValidationException; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.jdom.Element; +import org.jdom2.Element; /** * Subclass of the METS packager framework to ingest a DSpace diff --git a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java index da3965534f0b..380764268c2c 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java +++ 
b/dspace-api/src/main/java/org/dspace/content/packager/DSpaceMETSIngester.java @@ -23,7 +23,7 @@ import org.dspace.core.Context; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.service.PluginService; -import org.jdom.Element; +import org.jdom2.Element; /** * Packager plugin to ingest a diff --git a/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java b/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java index ffdb304802b1..3399bdf0f07e 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/METSManifest.java @@ -11,6 +11,7 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; @@ -34,15 +35,17 @@ import org.dspace.core.factory.CoreServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Content; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.Namespace; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; -import org.jdom.xpath.XPath; +import org.jdom2.Content; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** *

    @@ -381,15 +384,12 @@ public List getContentFiles() public List getMdFiles() throws MetadataValidationException { if (mdFiles == null) { - try { - // Use a special namespace with known prefix - // so we get the right prefix. - XPath xpath = XPath.newInstance("descendant::mets:mdRef"); - xpath.addNamespace(metsNS); - mdFiles = xpath.selectNodes(mets); - } catch (JDOMException je) { - throw new MetadataValidationException("Failed while searching for mdRef elements in manifest: ", je); - } + // Use a special namespace with known prefix + // so we get the right prefix. + XPathExpression xpath = + XPathFactory.instance() + .compile("descendant::mets:mdRef", Filters.element(), null, metsNS); + mdFiles = xpath.evaluate(mets); } return mdFiles; } @@ -413,25 +413,22 @@ public String getOriginalFilePath(Element file) { return null; } - try { - XPath xpath = XPath.newInstance( - "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\"" + groupID + "\"]"); - xpath.addNamespace(metsNS); - List oFiles = xpath.selectNodes(mets); - if (oFiles.size() > 0) { - if (log.isDebugEnabled()) { - log.debug("Got ORIGINAL file for derived=" + file.toString()); - } - Element flocat = ((Element) oFiles.get(0)).getChild("FLocat", metsNS); - if (flocat != null) { - return flocat.getAttributeValue("href", xlinkNS); - } + XPathExpression xpath = + XPathFactory.instance() + .compile( + "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\"" + groupID + "\"]", + Filters.element(), null, metsNS); + List oFiles = xpath.evaluate(mets); + if (oFiles.size() > 0) { + if (log.isDebugEnabled()) { + log.debug("Got ORIGINAL file for derived=" + file.toString()); + } + Element flocat = oFiles.get(0).getChild("FLocat", metsNS); + if (flocat != null) { + return flocat.getAttributeValue("href", xlinkNS); } - return null; - } catch (JDOMException je) { - log.warn("Got exception on XPATH looking for Original file, " + je.toString()); - return null; } + return null; } // translate bundle 
name from METS to DSpace; METS may be "CONTENT" @@ -656,7 +653,7 @@ private List getMdContentAsXml(Element mdSec, Mdref callback) String mimeType = mdWrap.getAttributeValue("MIMETYPE"); if (mimeType != null && mimeType.equalsIgnoreCase("text/xml")) { - byte value[] = Base64.decodeBase64(bin.getText().getBytes()); + byte value[] = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8)); Document mdd = parser.build(new ByteArrayInputStream(value)); List result = new ArrayList<>(1); result.add(mdd.getRootElement()); @@ -724,13 +721,13 @@ public InputStream getMdContentAsStream(Element mdSec, Mdref callback) throw new MetadataValidationException( "Invalid METS Manifest: mdWrap element with neither xmlData nor binData child."); } else { - byte value[] = Base64.decodeBase64(bin.getText().getBytes()); + byte value[] = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8)); return new ByteArrayInputStream(value); } } else { XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat()); return new ByteArrayInputStream( - outputPretty.outputString(xmlData.getChildren()).getBytes()); + outputPretty.outputString(xmlData.getChildren()).getBytes(StandardCharsets.UTF_8)); } } else { mdRef = mdSec.getChild("mdRef", metsNS); @@ -887,20 +884,16 @@ public String getParentOwnerLink() // use only when path varies each time you call it. 
protected Element getElementByXPath(String path, boolean nullOk) throws MetadataValidationException { - try { - XPath xpath = XPath.newInstance(path); - xpath.addNamespace(metsNS); - xpath.addNamespace(xlinkNS); - Object result = xpath.selectSingleNode(mets); - if (result == null && nullOk) { - return null; - } else if (result instanceof Element) { - return (Element) result; - } else { - throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\""); - } - } catch (JDOMException je) { - throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\"", je); + XPathExpression xpath = + XPathFactory.instance() + .compile(path, Filters.element(), null, metsNS, xlinkNS); + Element result = xpath.evaluateFirst(mets); + if (result == null && nullOk) { + return null; + } else if (result == null && !nullOk) { + throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\""); + } else { + return result; } } @@ -1176,7 +1169,7 @@ protected void crosswalkXmd(Context context, PackageParameters params, "Invalid METS Manifest: mdWrap element for streaming crosswalk without binData " + "child."); } else { - byte value[] = Base64.decodeBase64(bin.getText().getBytes()); + byte value[] = Base64.decodeBase64(bin.getText().getBytes(StandardCharsets.UTF_8)); sxwalk.ingest(context, dso, new ByteArrayInputStream(value), mdWrap.getAttributeValue("MIMETYPE")); @@ -1302,6 +1295,6 @@ public InputStream getMetsAsStream() { XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat()); return new ByteArrayInputStream( - outputPretty.outputString(mets).getBytes()); + outputPretty.outputString(mets).getBytes(StandardCharsets.UTF_8)); } } diff --git a/dspace-api/src/main/java/org/dspace/content/packager/PDFPackager.java b/dspace-api/src/main/java/org/dspace/content/packager/PDFPackager.java index c6036d2261b3..6c7baad45497 100644 --- 
a/dspace-api/src/main/java/org/dspace/content/packager/PDFPackager.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/PDFPackager.java @@ -45,7 +45,7 @@ import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.SelfNamedPlugin; import org.dspace.core.Utils; import org.dspace.workflow.WorkflowException; @@ -166,7 +166,7 @@ public DSpaceObject ingest(Context context, DSpaceObject parent, workspaceItemService.update(context, wi); success = true; - log.info(LogManager.getHeader(context, "ingest", + log.info(LogHelper.getHeader(context, "ingest", "Created new Item, db ID=" + String.valueOf(myitem.getID()) + ", WorkspaceItem ID=" + String.valueOf(wi.getID()))); diff --git a/dspace-api/src/main/java/org/dspace/content/packager/PackageException.java b/dspace-api/src/main/java/org/dspace/content/packager/PackageException.java index 58f2621afb86..13c705c932b1 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/PackageException.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/PackageException.java @@ -19,7 +19,6 @@ * exceptions. This class is intended for declarations and catch clauses. * * @author Larry Stone - * @version $Revision$ */ public class PackageException extends Exception { /** @@ -76,10 +75,4 @@ public void log(Logger log) { log.error(sw.toString()); } } - - public String toString() { - String base = getClass().getName() + ": " + getMessage(); - return (getCause() == null) ? 
base : - base + ", Reason: " + getCause().toString(); - } } diff --git a/dspace-api/src/main/java/org/dspace/content/packager/PackageParameters.java b/dspace-api/src/main/java/org/dspace/content/packager/PackageParameters.java index 1bd68bea262d..b472a52c3bad 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/PackageParameters.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/PackageParameters.java @@ -57,7 +57,7 @@ public static PackageParameters create(ServletRequest request) { } else if (v.length == 1) { result.setProperty(name, v[0]); } else { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < v.length; ++i) { if (i > 0) { sb.append(SEPARATOR); diff --git a/dspace-api/src/main/java/org/dspace/content/packager/PackageUtils.java b/dspace-api/src/main/java/org/dspace/content/packager/PackageUtils.java index e45aa7d69943..9e7d870076aa 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/PackageUtils.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/PackageUtils.java @@ -17,6 +17,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -183,7 +184,7 @@ public static void addDepositLicense(Context context, String license, Item item, Collection collection) throws SQLException, IOException, AuthorizeException { if (license == null) { - license = collection.getLicenseCollection(); + license = collectionService.getLicense(collection); } InputStream lis = new ByteArrayInputStream(license.getBytes()); @@ -447,6 +448,7 @@ public static Bitstream findDepositLicense(Context context, Item item) * @param parent Parent Object * @param type Type of new Object * @param handle Handle of new Object (may be null) + * @param uuid * @param params Properties-style list of options (interpreted by each packager). 
* @return newly created DSpace Object (or null) * @throws AuthorizeException if authorization error @@ -454,29 +456,55 @@ public static Bitstream findDepositLicense(Context context, Item item) * @throws IOException if IO error */ public static DSpaceObject createDSpaceObject(Context context, DSpaceObject parent, int type, String handle, - PackageParameters params) + UUID uuid, PackageParameters params) throws AuthorizeException, SQLException, IOException { DSpaceObject dso = null; switch (type) { case Constants.COLLECTION: - dso = collectionService.create(context, (Community) parent, handle); + Collection collection = collectionService.find(context, uuid); + if (collection != null) { + dso = collectionService.create(context, (Community) parent, handle); + } else { + dso = collectionService.create(context, (Community) parent, handle, uuid); + + } return dso; case Constants.COMMUNITY: // top-level community? if (parent == null || parent.getType() == Constants.SITE) { - dso = communityService.create(null, context, handle); + Community community = communityService.find(context, uuid); + if (community != null) { + dso = communityService.create(null, context, handle); + } else { + dso = communityService.create(null, context, handle, uuid); + } } else { - dso = communityService.createSubcommunity(context, ((Community) parent), handle); + Community community = communityService.find(context, uuid); + if (community != null) { + dso = communityService.createSubcommunity(context, ((Community) parent), handle); + } else { + dso = communityService.createSubcommunity(context, ((Community) parent), handle, uuid); + } } return dso; case Constants.ITEM: //Initialize a WorkspaceItem //(Note: Handle is not set until item is finished) - WorkspaceItem wsi = workspaceItemService - .create(context, (Collection) parent, params.useCollectionTemplate()); + Item item = itemService.find(context, uuid); + if (item != null) { + return item; + } + + WorkspaceItem wsi = null; + if 
(!params.replaceModeEnabled()) { + wsi = workspaceItemService.create(context, (Collection)parent, params.useCollectionTemplate()); + } else { + wsi = workspaceItemService.create(context, (Collection)parent, + uuid, params.useCollectionTemplate()); + } // Please note that we are returning an Item which is *NOT* yet in the Archive, // and doesn't yet have a handle assigned. diff --git a/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java b/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java index 93a2f446d080..f627779af8dc 100644 --- a/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java +++ b/dspace-api/src/main/java/org/dspace/content/packager/RoleDisseminator.java @@ -35,7 +35,7 @@ import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; -import org.jdom.Namespace; +import org.jdom2.Namespace; /** * Plugin to export all Group and EPerson objects in XML, perhaps for reloading. 
@@ -534,7 +534,7 @@ protected List findAssociatedGroups(Context context, DSpaceObject object) } // FINAL CATCH-ALL -> Find any other groups where name begins with "COLLECTION__" - // (Necessary cause XMLUI allows you to generate a 'COLLECTION__DEFAULT_READ' group) + // (Necessary because the old XMLUI allowed you to generate a 'COLLECTION__DEFAULT_READ' group) List matchingGroups = groupService.search(context, "COLLECTION\\_" + collection.getID() + "\\_"); for (Group g : matchingGroups) { if (!list.contains(g)) { diff --git a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java index 5f9982af689a..8effabf28435 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/BitstreamService.java @@ -33,6 +33,7 @@ */ public interface BitstreamService extends DSpaceObjectService, DSpaceObjectLegacySupportService { + @Override public Bitstream find(Context context, UUID id) throws SQLException; public List findAll(Context context) throws SQLException; @@ -53,7 +54,7 @@ public interface BitstreamService extends DSpaceObjectService, DSpace * @return the clone * @throws SQLException if database error */ - public Bitstream clone(Context context, Bitstream bitstream) throws SQLException; + public Bitstream clone(Context context, Bitstream bitstream) throws SQLException, AuthorizeException; /** * Create a new bitstream, with a new ID. 
The checksum and file size are @@ -182,7 +183,7 @@ public InputStream retrieve(Context context, Bitstream bitstream) * @return a list of all bitstreams that have been "deleted" * @throws SQLException if database error */ - public List findDeletedBitstreams(Context context) throws SQLException; + public List findDeletedBitstreams(Context context, int limit, int offset) throws SQLException; /** diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 67d45939d52f..0a56105ead40 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -12,8 +12,10 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; +import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -32,6 +34,11 @@ public interface CollectionService extends DSpaceObjectService, DSpaceObjectLegacySupportService { + /* + * Field used to sort community and collection lists at solr + */ + public static final String SOLR_SORT_FIELD = "dc.title_sort"; + /** * Create a new collection with a new ID. * Once created the collection is added to the given community @@ -45,7 +52,6 @@ public interface CollectionService public Collection create(Context context, Community community) throws SQLException, AuthorizeException; - /** * Create a new collection with the supplied handle and with a new ID. 
* Once created the collection is added to the given community @@ -60,6 +66,21 @@ public Collection create(Context context, Community community) throws SQLExcepti public Collection create(Context context, Community community, String handle) throws SQLException, AuthorizeException; + /** + * Create a new collection with the supplied handle and ID. + * Once created the collection is added to the given community + * + * @param context DSpace context object + * @param community DSpace Community (parent) + * @param handle the pre-determined Handle to assign to the new collection + * @param uuid the pre-determined UUID to assign to the new collection + * @return the newly created collection + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Collection create(Context context, Community community, String handle, UUID uuid) throws SQLException, + AuthorizeException; + /** * Get all collections in the system. These are alphabetically sorted by * collection name. 
@@ -308,7 +329,7 @@ public List findAuthorized(Context context, Community community, int throws java.sql.SQLException; /** - * + * * @param context DSpace Context * @param group EPerson Group * @return the collection, if any, that has the specified group as administrators or submitters @@ -344,11 +365,43 @@ public List findAuthorized(Context context, Community community, int Group createDefaultReadGroup(Context context, Collection collection, String typeOfGroupString, int defaultRead) throws SQLException, AuthorizeException; + /** + * This method will return the name to give to the group created by the + * {@link #createDefaultReadGroup(Context, Collection, String, int)} method + * + * @param collection The DSpace collection to use in the name generation + * @param typeOfGroupString The type of group to use in the name generation + * @return the name to give to the group that hold default read for the collection + */ + String getDefaultReadGroupName(Collection collection, String typeOfGroupString); + + /** + * Returns Collections for which the current user has 'submit' privileges. + * NOTE: for better performance, this method retrieves its results from an + * index (cache) and does not query the database directly. + * This means that results may be stale or outdated until https://github.com/DSpace/DSpace/issues/2853 is resolved" + * + * @param q limit the returned collection to those with metadata values matching the query terms. 
+ * The terms are used to make also a prefix query on SOLR so it can be used to implement + * an autosuggest feature over the collection name + * @param context DSpace Context + * @param community parent community + * @param entityType limit the returned collection to those related to given entity type + * @param offset the position of the first result to return + * @param limit paging limit + * @return discovery search result objects + * @throws SQLException if something goes wrong + * @throws SearchServiceException if search error + */ + public List findCollectionsWithSubmit(String q, Context context, Community community, + String entityType, int offset, int limit) throws SQLException, SearchServiceException; + /** * Returns Collections for which the current user has 'submit' privileges. * NOTE: for better performance, this method retrieves its results from an * index (cache) and does not query the database directly. - * This means that results may be stale or outdated until DS-4524 is resolved" + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved" * * @param q limit the returned collection to those with metadata values matching the query terms. 
* The terms are used to make also a prefix query on SOLR so it can be used to implement @@ -364,11 +417,40 @@ Group createDefaultReadGroup(Context context, Collection collection, String type public List findCollectionsWithSubmit(String q, Context context, Community community, int offset, int limit) throws SQLException, SearchServiceException; + /** + * Retrieve the first collection in the community or its descending that support + * the provided entityType + * + * @param context the DSpace context + * @param community the root from where the search start + * @param entityType the requested entity type + * @return the first collection in the community or its descending + * that support the provided entityType + */ + public Collection retrieveCollectionWithSubmitByCommunityAndEntityType(Context context, Community community, + String entityType); + + /** + * Retrieve the close collection to the item for which the current user has + * 'submit' privileges that support the provided entityType. Close mean the + * collection that can be reach with the minimum steps starting from the item + * (owningCollection, brothers collections, etc) + * + * @param context the DSpace context + * @param item the item from where the search start + * @param entityType the requested entity type + * @return the first collection in the community or its descending + * that support the provided entityType + */ + public Collection retrieveCollectionWithSubmitByEntityType(Context context, Item item, String entityType) + throws SQLException; + /** * Counts the number of Collection for which the current user has 'submit' privileges. * NOTE: for better performance, this method retrieves its results from an index (cache) * and does not query the database directly. - * This means that results may be stale or outdated until DS-4524 is resolved." + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." 
* * @param q limit the returned collection to those with metadata values matching the query terms. * The terms are used to make also a prefix query on SOLR so it can be used to implement @@ -381,4 +463,48 @@ public List findCollectionsWithSubmit(String q, Context context, Com */ public int countCollectionsWithSubmit(String q, Context context, Community community) throws SQLException, SearchServiceException; + + /** + * Counts the number of Collection for which the current user has 'submit' privileges. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." + * + * @param q limit the returned collection to those with metadata values matching the query terms. + * The terms are used to make also a prefix query on SOLR so it can be used to implement + * an autosuggest feature over the collection name + * @param context DSpace Context + * @param community parent community + * @param entityType limit the returned collection to those related to given entity type + * @return total collections found + * @throws SQLException if something goes wrong + * @throws SearchServiceException if search error + */ + public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) + throws SQLException, SearchServiceException; + + /** + * Returns a list of all collections for a specific entity type. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." 
+ * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; + + /** + * Returns total collection archived items + * + * @param collection Collection + * @return total collection archived items + * @throws ItemCountException + */ + int countArchivedItems(Collection collection) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java index 54512bb9f506..c089bcec8df1 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CommunityService.java @@ -11,8 +11,10 @@ import java.io.InputStream; import java.sql.SQLException; import java.util.List; +import java.util.UUID; import org.dspace.authorize.AuthorizeException; +import org.dspace.browse.ItemCountException; import org.dspace.content.Bitstream; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -53,6 +55,20 @@ public interface CommunityService extends DSpaceObjectService, DSpace public Community create(Community parent, Context context, String handle) throws SQLException, AuthorizeException; + /** + * Create a new top-level community, with a new ID. 
+ * + * @param parent parent community + * @param context DSpace context object + * @param handle the pre-determined Handle to assign to the new community + * @param uuid the pre-determined uuid to assign to the new community + * @return the newly created community + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Community create(Community parent, Context context, + String handle, UUID uuid) throws SQLException, AuthorizeException; + /** * Get a list of all communities in the system. These are alphabetically @@ -202,6 +218,20 @@ public Community createSubcommunity(Context context, Community parentCommunity) public Community createSubcommunity(Context context, Community parentCommunity, String handle) throws SQLException, AuthorizeException; + /** + * Create a new sub-community within this community. + * + * @param context context + * @param handle the pre-determined Handle to assign to the new community + * @param parentCommunity parent community + * @param uuid the pre-determined UUID to assign to the new community + * @return the new community + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Community createSubcommunity(Context context, Community parentCommunity, String handle, UUID uuid) + throws SQLException, AuthorizeException; + /** * Add an existing community as a subcommunity to the community * @@ -263,4 +293,13 @@ public void removeSubcommunity(Context context, Community parentCommunity, Commu public List findAuthorizedGroupMapped(Context context, List actions) throws SQLException; int countTotal(Context context) throws SQLException; + + /** + * Returns total community archived items + * + * @param community Community + * @return total community archived items + * @throws ItemCountException + */ + int countArchivedItems(Community community) throws ItemCountException; } diff --git 
a/dspace-api/src/main/java/org/dspace/content/service/EntityTypeService.java b/dspace-api/src/main/java/org/dspace/content/service/EntityTypeService.java index f4d9a15bb2a8..d0a1a498ceb9 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/EntityTypeService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/EntityTypeService.java @@ -7,9 +7,11 @@ */ package org.dspace.content.service; +import java.io.IOException; import java.sql.SQLException; import java.util.List; +import org.apache.solr.client.solrj.SolrServerException; import org.dspace.authorize.AuthorizeException; import org.dspace.content.EntityType; import org.dspace.core.Context; @@ -56,4 +58,46 @@ public interface EntityTypeService extends DSpaceCRUDService { * @throws AuthorizeException If something geos wrong with authorizations */ public EntityType create(Context context, String entityTypeString) throws SQLException, AuthorizeException; + + /** + * Retrieves all entity types related to the collections on which the current user can deposit + * + * @param context DSpace context object + * @return + * @throws SQLException If database error + * @throws SolrServerException If there is a problem in communicating with Solr + * @throws IOException If IO error + */ + public List getSubmitAuthorizedTypes(Context context) throws SQLException, SolrServerException, IOException; + + /** + * + * @param context DSpace context object + * @param names List of Entity type names that you want to retrieve + * @param limit paging limit + * @param offset the position of the first result to return + * @return + * @throws SQLException if database error + */ + public List getEntityTypesByNames(Context context, List names,Integer limit, Integer offset) + throws SQLException; + + /** + * + * @param context DSpace context object + * @param names List of Entity type names that you want to retrieve + * @return + * @throws SQLException if database error + */ + public int countEntityTypesByNames(Context context, 
List names) throws SQLException; + + /** + * Initializes the EntityType names, and marks them "permanent". + * + * @param context DSpace context object + * @throws SQLException Database exception + * @throws AuthorizeException Authorization error + */ + public void initDefaultEntityTypeNames(Context context) throws SQLException, AuthorizeException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/FeedbackService.java b/dspace-api/src/main/java/org/dspace/content/service/FeedbackService.java new file mode 100644 index 000000000000..d21afd678000 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/service/FeedbackService.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.service; +import java.io.IOException; +import javax.mail.MessagingException; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.core.Context; + +/** + * Service interface class for the Feedback object. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface FeedbackService { + + /** + * This method sends the feeback email to the recipient passed as parameter + * @param context current DSpace application context + * @param request current servlet request + * @param recipientEmail recipient to which mail is sent + * @param senderEmail email address of the sender + * @param message message body + * @param page page from which user accessed and filled feedback form + * @throws IOException + * @throws MessagingException + */ + public void sendEmail(Context context, HttpServletRequest request, String recipientEmail, String senderEmail, + String message, String page) throws IOException, MessagingException; + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java b/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java index 67ac2e20499c..d00c62cc91d8 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/InstallItemService.java @@ -83,4 +83,15 @@ public Item restoreItem(Context c, InProgressSubmission is, public String getBitstreamProvenanceMessage(Context context, Item myitem) throws SQLException; + /** + * Generate provenance description of direct item submission (not through workflow). 
+ * + * @param context context + * @param item the item to generate description for + * @return provenance description + * @throws SQLException if database error + */ + public String getSubmittedByProvenanceMessage(Context context, Item item) + throws SQLException;; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index 2a38488f7a68..43a804cde2eb 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -21,12 +21,13 @@ import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.Community; +import org.dspace.content.EntityType; import org.dspace.content.Item; -import org.dspace.content.MetadataField; import org.dspace.content.MetadataValue; import org.dspace.content.Thumbnail; import org.dspace.content.WorkspaceItem; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; @@ -43,9 +44,8 @@ public interface ItemService public Thumbnail getThumbnail(Context context, Item item, boolean requireOriginal) throws SQLException; /** - * Create a new item, with a new internal ID. This method is not public, - * since items need to be created as workspace items. Authorisation is the - * responsibility of the caller. + * Create a new item, with a new internal ID. Authorization is done + * inside of this method. * * @param context DSpace context object * @param workspaceItem in progress workspace item @@ -55,6 +55,19 @@ public interface ItemService */ public Item create(Context context, WorkspaceItem workspaceItem) throws SQLException, AuthorizeException; + /** + * Create a new item, with a provided ID. Authorisation is done + * inside of this method. 
+ * + * @param context DSpace context object + * @param workspaceItem in progress workspace item + * @param uuid the pre-determined UUID to assign to the new item + * @return the newly created item + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Item create(Context context, WorkspaceItem workspaceItem, UUID uuid) throws SQLException, AuthorizeException; + /** * Create an empty template item for this collection. If one already exists, * no action is taken. Caution: Make sure you call update on @@ -99,8 +112,22 @@ public interface ItemService * @return an iterator over the items in the archive. * @throws SQLException if database error */ + @Deprecated public Iterator findAllUnfiltered(Context context) throws SQLException; + /** + * Find all items that are: + * - NOT in the workspace + * - NOT in the workflow + * - NOT a template item for e.g. a collection + * + * This implies that the result also contains older versions of items and withdrawn items. + * @param context the DSpace context. + * @return iterator over all regular items. + * @throws SQLException if database error. + */ + public Iterator findAllRegularItems(Context context) throws SQLException; + /** * Find all the items in the archive by a given submitter. The order is * indeterminate. Only items with the "in archive" flag set are included. 
@@ -445,7 +472,7 @@ public void replaceAllBitstreamPolicies(Context context, Item item, List findArchivedByMetadataField(Context context, String schema, + String element, String qualifier, + String value) throws SQLException, AuthorizeException; + + /** + * Returns an iterator of in archive items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param metadataField metadata + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that authority value + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Iterator findArchivedByMetadataField(Context context, String metadataField, String value) + throws SQLException, AuthorizeException; + /** * Returns an iterator of Items possessing the passed metadata field, or only * those matching the passed value, if value is not Item.ANY @@ -555,11 +748,6 @@ public Iterator findByMetadataField(Context context, String schema, String element, String qualifier, String value) throws SQLException, AuthorizeException, IOException; - public Iterator findByMetadataQuery(Context context, List> listFieldList, - List query_op, List query_val, List collectionUuids, - String regexClause, int offset, int limit) - throws SQLException, AuthorizeException, IOException; - /** * Find all the items in the archive with a given authority key value * in the indicated metadata field. 
@@ -576,7 +764,7 @@ public Iterator findByMetadataQuery(Context context, List findByAuthorityValue(Context context, String schema, String element, String qualifier, String value) - throws SQLException, AuthorizeException, IOException; + throws SQLException, AuthorizeException; public Iterator findByMetadataFieldAuthority(Context context, String mdString, String authority) @@ -680,6 +868,27 @@ public Iterator findByLastModifiedSince(Context context, Date last) */ int countWithdrawnItems(Context context) throws SQLException; + /** + * finds all items for which the current user has editing rights + * @param context DSpace context object + * @param offset page offset + * @param limit page size limit + * @return list of items for which the current user has editing rights + * @throws SQLException + * @throws SearchServiceException + */ + public List findItemsWithEdit(Context context, int offset, int limit) + throws SQLException, SearchServiceException; + + /** + * counts all items for which the current user has editing rights + * @param context DSpace context object + * @return list of items for which the current user has editing rights + * @throws SQLException + * @throws SearchServiceException + */ + public int countItemsWithEdit(Context context) throws SQLException, SearchServiceException; + /** * Check if the supplied item is an inprogress submission * @@ -741,4 +950,19 @@ public Iterator findByLastModifiedSince(Context context, Date last) public List getMetadata(Item item, String schema, String element, String qualifier, String lang, boolean enableVirtualMetadata); + /** + * Retrieve the label of the entity type of the given item. + * @param item the item. + * @return the label of the entity type, taken from the item metadata, or null if not found. + */ + public String getEntityTypeLabel(Item item); + + /** + * Retrieve the entity type of the given item. + * @param context the DSpace context. + * @param item the item. 
+ * @return the entity type of the given item, or null if not found. + */ + public EntityType getEntityType(Context context, Item item) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java b/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java index fab4616ef3d2..719f966e4622 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/RelationshipService.java @@ -9,11 +9,14 @@ import java.sql.SQLException; import java.util.List; +import java.util.UUID; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Item; import org.dspace.content.Relationship; +import org.dspace.content.Relationship.LatestVersionStatus; import org.dspace.content.RelationshipType; +import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId; import org.dspace.core.Context; import org.dspace.service.DSpaceCRUDService; @@ -48,6 +51,25 @@ public interface RelationshipService extends DSpaceCRUDService { List findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted) throws SQLException; + /** + * Retrieves the list of Relationships currently in the system for which the given Item is either + * a leftItem or a rightItem object + * @param context The relevant DSpace context + * @param item The Item that has to be the left or right item for the relationship to be + * included in the list + * @param limit paging limit + * @param offset paging offset + * @param excludeTilted If true, excludes tilted relationships + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of relationships for which each relationship adheres to the above + * listed constraint + * @throws SQLException If something goes wrong + */ + List findByItem( + Context context, Item item, 
Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest + ) throws SQLException; + /** * Retrieves the full list of relationships currently in the system * @param context The relevant DSpace context @@ -78,30 +100,54 @@ List findByItem(Context context, Item item, Integer limit, Integer public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException; /** - * This method returns the next leftplace integer to use for a relationship with this item as the leftItem + * Move the given relationship to a new leftPlace and/or rightPlace. * - * @param context The relevant DSpace context - * @param item The item that has to be the leftItem of a relationship for it to qualify - * @return The next integer to be used for the leftplace of a relationship with the given item - * as a left item - * @throws SQLException If something goes wrong + * This will + * 1. verify whether the move is authorized + * 2. move the relationship to the specified left/right place + * 3. update the left/right place of other relationships and/or metadata in order to resolve the move without + * leaving any gaps + * + * At least one of the new places should be non-null, otherwise no changes will be made. 
+ * + * @param context The relevant DSpace context + * @param relationship The Relationship to move + * @param newLeftPlace The value to set the leftPlace of this Relationship to + * @param newRightPlace The value to set the rightPlace of this Relationship to + * @return The moved relationship with updated place variables + * @throws SQLException If something goes wrong + * @throws AuthorizeException If the user is not authorized to update the Relationship or its Items */ - int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException; + Relationship move(Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace) + throws SQLException, AuthorizeException; /** - * This method returns the next rightplace integer to use for a relationship with this item as the rightItem + * Move the given relationship to a new leftItem and/or rightItem. * - * @param context The relevant DSpace context - * @param item The item that has to be the rightitem of a relationship for it to qualify - * @return The next integer to be used for the rightplace of a relationship with the given item - * as a right item - * @throws SQLException If something goes wrong + * This will + * 1. move the relationship to the last place in its current left or right Item. This ensures that we don't leave + * any gaps when moving the relationship to a new Item. + * If only one of the relationship's Items is changed,the order of relationships and metadatain the other + * will not be affected + * 2. insert the relationship into the new Item(s) + * + * At least one of the new Items should be non-null, otherwise no changes will be made. 
+ * + * @param context The relevant DSpace context + * @param relationship The Relationship to move + * @param newLeftItem The value to set the leftItem of this Relationship to + * @param newRightItem The value to set the rightItem of this Relationship to + * @return The moved relationship with updated left/right Items variables + * @throws SQLException If something goes wrong + * @throws AuthorizeException If the user is not authorized to update the Relationship or its Items */ - int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException; + Relationship move(Context context, Relationship relationship, Item newLeftItem, Item newRightItem) + throws SQLException, AuthorizeException; /** * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param relationshipType The RelationshipType object that will be used to check the Relationship on @@ -116,6 +162,7 @@ public List findByItemAndRelationshipType(Context context, Item it /** * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param relationshipType The RelationshipType object that will be used to check the Relationship on @@ -130,6 +177,24 @@ public List findByItemAndRelationshipType(Context context, Item it /** * This method returns a list of 
Relationships for which the leftItem or rightItem is equal to the given * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type + * @param context The relevant DSpace context + * @param item The Item object to be matched on the leftItem or rightItem for the relationship + * @param relationshipType The RelationshipType object that will be used to check the Relationship on + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that have the given Item object as leftItem or rightItem and + * for which the relationshipType property is equal to the given RelationshipType + * @throws SQLException If something goes wrong + */ + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest + ) throws SQLException; + + /** + * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given + * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param item The Item object to be matched on the leftItem or rightItem for the relationship * @param relationshipType The RelationshipType object that will be used to check the Relationship on @@ -144,17 +209,51 @@ public List findByItemAndRelationshipType(Context context, Item it throws SQLException; /** - * This method will update the place for the Relationship and all other relationships found by the items and - * relationship type of the given Relationship. 
It will give this Relationship the last place in both the - * left and right place determined by querying for the list of leftRelationships and rightRelationships - * by the leftItem, rightItem and relationshipType of the given Relationship. - * @param context The relevant DSpace context - * @param relationship The Relationship object that will have it's place updated and that will be used - * to retrieve the other relationships whose place might need to be updated - * @throws SQLException If something goes wrong + * This method returns a list of Relationships for which the leftItem or rightItem is equal to the given + * Item object and for which the RelationshipType object is equal to the relationshipType property + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type + * @param context The relevant DSpace context + * @param item The Item object to be matched on the leftItem or rightItem for the relationship + * @param relationshipType The RelationshipType object that will be used to check the Relationship on + * @param isLeft Is the item left or right + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return The list of Relationship objects that have the given Item object as leftItem or rightItem and + * for which the relationshipType property is equal to the given RelationshipType + * @throws SQLException If something goes wrong */ - public void updatePlaceInRelationship(Context context, Relationship relationship) - throws SQLException, AuthorizeException; + public List findByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset, + boolean excludeNonLatest + ) throws SQLException; + + /** + * This method returns the UUIDs of all items that have a relationship with the given item, from the perspective + * of the other item. 
In other words, given a relationship with the given item, the given item should have + * "latest status" in order for the other item uuid to be returned. + * + * This method differs from the "excludeNonLatest" property in other methods, + * because in this method the current item should have "latest status" to return the other item, + * whereas with "excludeNonLatest" the other item should have "latest status" to be returned. + * + * This method is used to index items in solr; when searching for related items of one of the returned uuids, + * the given item should appear as a search result. + * + * NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch + * the items on both sides, which is unnecessary. + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type. + * @param context the DSpace context. + * @param latestItem the target item; only relationships where this item has "latest status" should be considered. + * @param relationshipType the relationship type for which relationships should be selected. + * @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type. + * This is redundant in most cases, but necessary because relationship types my have + * the same entity type on both sides. + * @return a list containing pairs of relationship ids and item uuids. + * @throws SQLException if something goes wrong. + */ + public List findByLatestItemAndRelationshipType( + Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft + ) throws SQLException; /** * This method will update the given item's metadata order. 
@@ -173,6 +272,7 @@ public void updatePlaceInRelationship(Context context, Relationship relationship /** * This method returns a list of Relationship objects for which the relationshipType property is equal to the given * RelationshipType object + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param relationshipType The RelationshipType object that will be used to check the Relationship on * @return The list of Relationship objects for which the given RelationshipType object is equal @@ -184,6 +284,7 @@ public void updatePlaceInRelationship(Context context, Relationship relationship /** * This method returns a list of Relationship objets for which the relationshipType property is equal to the given * RelationshipType object + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context The relevant DSpace context * @param relationshipType The RelationshipType object that will be used to check the Relationship on * @param limit paging limit @@ -197,6 +298,27 @@ List findByRelationshipType(Context context, RelationshipType rela /** * This method is used to construct a Relationship object with all it's variables + * @param c The relevant DSpace context + * @param leftItem The leftItem Item object for the relationship + * @param rightItem The rightItem Item object for the relationship + * @param relationshipType The RelationshipType object for the relationship + * @param leftPlace The leftPlace integer for the relationship + * @param rightPlace The rightPlace integer for the relationship + * @param leftwardValue The leftwardValue string for the relationship + * @param rightwardValue The rightwardValue string for the relationship + * @param latestVersionStatus The latestVersionStatus value for the relationship + * @return The created Relationship object with the given properties + * @throws AuthorizeException If something goes wrong + * @throws 
SQLException If something goes wrong + */ + Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus + ) throws AuthorizeException, SQLException; + + /** + * This method is used to construct a Relationship object with all it's variables, + * except the latest version status * @param c The relevant DSpace context * @param leftItem The leftItem Item object for the relationship * @param rightItem The rightItem Item object for the relationship @@ -209,14 +331,15 @@ List findByRelationshipType(Context context, RelationshipType rela * @throws AuthorizeException If something goes wrong * @throws SQLException If something goes wrong */ - Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, - int leftPlace, int rightPlace, String leftwardValue, String rightwardValue) - throws AuthorizeException, SQLException; + Relationship create( + Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace, + String leftwardValue, String rightwardValue + ) throws AuthorizeException, SQLException; /** * This method is used to construct a Relationship object with all it's variables, - * except the leftward and rightward labels + * except the leftward label, rightward label and latest version status * @param c The relevant DSpace context * @param leftItem The leftItem Item object for the relationship * @param rightItem The rightItem Item object for the relationship @@ -266,7 +389,7 @@ List findByTypeName(Context context, String typeName, Integer limi /** * Count total number of relationships (rows in relationship table) by a relationship type - * + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context context * @param relationshipType relationship type to filter by * @return total count @@ -286,10 
+409,25 @@ List findByTypeName(Context context, String typeName, Integer limi */ int countByItem(Context context, Item item) throws SQLException; + /** + * This method returns a count of Relationship objects that have the given Item object + * as a leftItem or a rightItem + * @param context The relevant DSpace context + * @param item The item that should be either a leftItem or a rightItem of all + * the Relationship objects in the returned list + * @param excludeTilted if true, excludes tilted relationships + * @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version + * that is relevant + * @return The list of Relationship objects that contain either a left or a + * right item that is equal to the given item + * @throws SQLException If something goes wrong + */ + int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException; + /** * Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating * whether the relationship should contain the item on the left side or not - * + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type * @param context context * @param relationshipType relationship type to filter by * @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not @@ -299,6 +437,22 @@ List findByTypeName(Context context, String typeName, Integer limi int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) throws SQLException; + /** + * Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating + * whether the relationship should contain the item on the left side or not + * NOTE: tilted relationships are NEVER excluded when fetching one relationship type + * @param context context + * @param relationshipType relationship 
type to filter by + * @param isLeft Indicating whether the counted Relationships should have the given Item on the left side + * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version + * that is relevant for this relationship + * @return total count with the given parameters + * @throws SQLException if database error + */ + int countByItemAndRelationshipType( + Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest + ) throws SQLException; + /** * Count total number of relationships (rows in relationship table) * by a relationship leftward or rightward typeName @@ -335,4 +489,40 @@ void delete(Context context, Relationship relationship, boolean copyToLeftItem, */ void forceDelete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem) throws SQLException, AuthorizeException; + + /** + * This method is used to retrieve relationships that match focusItem + * on the one hand and matches list of related items elsewhere. + * + * @param context DSpace context object + * @param focusUUID UUID of Item that will match left side if the param isLeft is true otherwise right side + * @param relationshipType Relationship type to filter by + * @param items List of UUID that will use to filter other side respect the focusUUID + * @param isLeft Indicating whether the counted Relationships should have + * the given Item on the left side or not + * @param limit paging limit + * @param offset paging offset + * @return + * @throws SQLException If database error + */ + public List findByItemRelationshipTypeAndRelatedList(Context context, UUID focusUUID, + RelationshipType relationshipType, List items, boolean isLeft, + int offset, int limit) throws SQLException; + + /** + * Count total number of relationships that match focusItem + * on the one hand and matches list of related items elsewhere. 
+ * + * @param context DSpace context object + * @param focusUUID UUID of Item that will match left side if the param isLeft is true otherwise right side + * @param relationshipType Relationship type to filter by + * @param items List of UUIDs that will be used to filter the other side with respect to the focusUUID + * @param isLeft Indicating whether the counted Relationships should have + * the given Item on the left side or not + * @return total count of matching relationships + * @throws SQLException If database error + */ + public int countByItemRelationshipTypeAndRelatedList(Context context, UUID focusUUID, + RelationshipType relationshipType, List items, boolean isLeft) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/RelationshipTypeService.java b/dspace-api/src/main/java/org/dspace/content/service/RelationshipTypeService.java index 94ebbca41ef3..0d84ccd5e241 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/RelationshipTypeService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/RelationshipTypeService.java @@ -100,9 +100,29 @@ List findByLeftwardOrRightwardTypeName(Context context, String */ List findByEntityType(Context context, EntityType entityType) throws SQLException; + /** + * Returns a list of relationship types that match the provided EntityType object on any side of the relationship + * + * @param context The relevant DSpace context + * @param entityType The EntityType object that will be used to check against + * @param limit Paging limit + * @param offset Paging offset + * @return list of matching RelationshipType objects + * @throws SQLException If database error + */ List findByEntityType(Context context, EntityType entityType, Integer limit, Integer offset) throws SQLException; + /** + * Count all RelationshipType objects for which the given EntityType + * is equal to either the leftType or the rightType + * + * @param context DSpace context object + * @param entityType The EntityType object used to check the leftType and rightType properties + * @return Total RelationshipType
objects + * @throws SQLException If database error + */ + public int countByEntityType(Context context, EntityType entityType) throws SQLException; /** * This method will return a list of RelationshipType objects for which the given EntityType object is equal diff --git a/dspace-api/src/main/java/org/dspace/content/service/SupervisedItemService.java b/dspace-api/src/main/java/org/dspace/content/service/SupervisedItemService.java deleted file mode 100644 index 883e0f9fd2fb..000000000000 --- a/dspace-api/src/main/java/org/dspace/content/service/SupervisedItemService.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.content.service; - -import java.sql.SQLException; -import java.util.List; - -import org.dspace.content.WorkspaceItem; -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; - -/** - * Class to handle WorkspaceItems which are being supervised. 
- * - * @author Richard Jones - * @version $Revision$ - */ -public interface SupervisedItemService { - /** - * Get all workspace items which are being supervised - * - * @param context the context this object exists in - * @return array of SupervisedItems - * @throws SQLException if database error - */ - public List getAll(Context context) throws SQLException; - - - /** - * Get items being supervised by given EPerson - * - * @param ep the eperson who's items to supervise we want - * @param context the dspace context - * @return the items eperson is supervising in an array - * @throws SQLException if database error - */ - public List findbyEPerson(Context context, EPerson ep) - throws SQLException; -} diff --git a/dspace-api/src/main/java/org/dspace/content/service/WorkspaceItemService.java b/dspace-api/src/main/java/org/dspace/content/service/WorkspaceItemService.java index 3ee381706c21..c8df68e43498 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/WorkspaceItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/WorkspaceItemService.java @@ -11,6 +11,7 @@ import java.sql.SQLException; import java.util.List; import java.util.Map; +import java.util.UUID; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; @@ -55,6 +56,22 @@ public interface WorkspaceItemService extends InProgressSubmissionServicetrue, the workspace item starts as a copy + * of the collection's template item + * @return the newly created workspace item + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public WorkspaceItem create(Context context, Collection collection, UUID uuid, boolean template) + throws AuthorizeException, SQLException; + public WorkspaceItem create(Context c, WorkflowItem wfi) throws SQLException, AuthorizeException; @@ -110,10 +127,6 @@ public List findByCollection(Context context, Collection collecti public WorkspaceItem findByItem(Context context, Item item) 
throws SQLException; - public List findAllSupervisedItems(Context context) throws SQLException; - - public List findSupervisedItemsByEPerson(Context context, EPerson ePerson) throws SQLException; - /** * Get all workspace items in the whole system * diff --git a/dspace-api/src/main/java/org/dspace/content/virtual/UUIDValue.java b/dspace-api/src/main/java/org/dspace/content/virtual/UUIDValue.java index 252faf019c25..0b08cc309f86 100644 --- a/dspace-api/src/main/java/org/dspace/content/virtual/UUIDValue.java +++ b/dspace-api/src/main/java/org/dspace/content/virtual/UUIDValue.java @@ -8,7 +8,6 @@ package org.dspace.content.virtual; import java.sql.SQLException; -import java.util.LinkedList; import java.util.List; import org.dspace.content.Item; @@ -24,9 +23,7 @@ public class UUIDValue implements VirtualMetadataConfiguration { @Override public List getValues(Context context, Item item) throws SQLException { - List list = new LinkedList<>(); - list.add(String.valueOf(item.getID())); - return list; + return List.of(String.valueOf(item.getID())); } @Override diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java index 9f97dce1ce7e..32ad747d765e 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDAO.java @@ -155,12 +155,11 @@ public Query createQuery(Context context, String query) throws SQLException { * @return A list of distinct results as depicted by the CriteriaQuery and parameters * @throws SQLException */ - public List list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, - int offset) throws SQLException { + public List list( + Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, int offset + ) throws SQLException { criteriaQuery.distinct(true); - @SuppressWarnings("unchecked") - List result = 
(List) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); - return result; + return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); } /** @@ -183,12 +182,12 @@ public List list(Context context, CriteriaQuery criteriaQuery, boolean cachea * @return A list of results determined by the CriteriaQuery and parameters * @throws SQLException */ - public List list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, - int offset, boolean distinct) throws SQLException { + public List list( + Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, int maxResults, int offset, + boolean distinct + ) throws SQLException { criteriaQuery.distinct(distinct); - @SuppressWarnings("unchecked") - List result = (List) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); - return result; + return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset); } /** @@ -204,6 +203,22 @@ public List list(Query query) { return result; } + /** + * This method will return a list of results for the given Query and parameters + * + * @param query The query for which the resulting list will be returned + * @param limit The maximum amount of results to be returned + * @param offset The offset to be used for the Query + * @return A list of results determined by the Query and parameters + */ + public List list(Query query, int limit, int offset) { + query.setFirstResult(offset); + query.setMaxResults(limit); + @SuppressWarnings("unchecked") + List result = (List) query.getResultList(); + return result; + } + /** * Retrieve a unique result from the query. If multiple results CAN be * retrieved an exception will be thrown, so only use when the criteria @@ -212,16 +227,14 @@ public List list(Query query) { * @param criteriaQuery JPA criteria * @param cacheable whether or not this query should be cacheable. 
* @param clazz type of object that should match the query. - * @param maxResults return at most this many results. - * @param offset skip this many leading results. * @return the single model object specified by the criteria, * or {@code null} if none match. * @throws java.sql.SQLException passed through. * @throws IllegalArgumentException if multiple objects match. */ - public T uniqueResult(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class clazz, - int maxResults, int offset) throws SQLException { - List list = list(context, criteriaQuery, cacheable, clazz, maxResults, offset); + public T uniqueResult(Context context, CriteriaQuery criteriaQuery, + boolean cacheable, Class clazz) throws SQLException { + List list = list(context, criteriaQuery, cacheable, clazz, -1, -1); if (CollectionUtils.isNotEmpty(list)) { if (list.size() == 1) { return list.get(0); diff --git a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java index 232431cac71c..e9c6b95b7f05 100644 --- a/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java +++ b/dspace-api/src/main/java/org/dspace/core/AbstractHibernateDSODAO.java @@ -48,7 +48,7 @@ public T findByLegacyId(Context context, int legacyId, Class clazz) throws SQ CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, clazz); Root root = criteriaQuery.from(clazz); criteriaQuery.where(criteriaBuilder.equal(root.get("legacyId"), legacyId)); - return uniqueResult(context, criteriaQuery, false, clazz, -1, -1); + return uniqueResult(context, criteriaQuery, false, clazz); } /** @@ -83,13 +83,14 @@ protected void addMetadataValueWhereQuery(StringBuilder query, List * The context object is also used as a cache for CM API objects. 
- * - * @version $Revision$ */ public class Context implements AutoCloseable { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(Context.class); @@ -81,23 +81,28 @@ public class Context implements AutoCloseable { /** * A stack with the history of authorisation system check modify */ - private Stack authStateChangeHistory; + private Deque authStateChangeHistory; /** * A stack with the name of the caller class that modify authorisation * system check */ - private Stack authStateClassCallHistory; + private Deque authStateClassCallHistory; /** * Group IDs of special groups user is a member of */ - private List specialGroups; + private Set specialGroups; /** * Temporary store for the specialGroups when the current user is temporary switched */ - private List specialGroupsPreviousState; + private Set specialGroupsPreviousState; + + /** + * The currently used authentication method + */ + private String authenticationMethod; /** * Content events @@ -112,17 +117,22 @@ public class Context implements AutoCloseable { /** * Context mode */ - private Mode mode = Mode.READ_WRITE; + private Mode mode; /** * Cache that is only used the context is in READ_ONLY mode */ - private ContextReadOnlyCache readOnlyCache = new ContextReadOnlyCache(); + private final ContextReadOnlyCache readOnlyCache = new ContextReadOnlyCache(); protected EventService eventService; private DBConnection dbConnection; + /** + * The default administrator group + */ + private Group adminGroup; + public enum Mode { READ_ONLY, READ_WRITE, @@ -130,7 +140,6 @@ public enum Mode { } protected Context(EventService eventService, DBConnection dbConnection) { - this.mode = Mode.READ_WRITE; this.eventService = eventService; this.dbConnection = dbConnection; init(); @@ -142,7 +151,6 @@ protected Context(EventService eventService, DBConnection dbConnection) { * No user is authenticated. 
*/ public Context() { - this.mode = Mode.READ_WRITE; init(); } @@ -159,8 +167,6 @@ public Context(Mode mode) { /** * Initializes a new context object. - * - * @throws SQLException if there was an error obtaining a database connection */ protected void init() { updateDatabase(); @@ -183,11 +189,15 @@ protected void init() { extraLogInfo = ""; ignoreAuth = false; - specialGroups = new ArrayList<>(); + specialGroups = new HashSet<>(); + + authStateChangeHistory = new ConcurrentLinkedDeque<>(); + authStateClassCallHistory = new ConcurrentLinkedDeque<>(); + + if (this.mode != null) { + setMode(this.mode); + } - authStateChangeHistory = new Stack<>(); - authStateClassCallHistory = new Stack<>(); - setMode(this.mode); } /** @@ -312,10 +322,10 @@ public void restoreAuthSystemState() { Boolean previousState; try { previousState = authStateChangeHistory.pop(); - } catch (EmptyStackException ex) { - log.warn(LogManager.getHeader(this, "restore_auth_sys_state", - "not previous state info available " - + ex.getLocalizedMessage())); + } catch (NoSuchElementException ex) { + log.warn(LogHelper.getHeader(this, "restore_auth_sys_state", + "not previous state info available: {}"), + ex::getLocalizedMessage); previousState = Boolean.FALSE; } if (log.isDebugEnabled()) { @@ -323,13 +333,19 @@ public void restoreAuthSystemState() { StackTraceElement[] stackTrace = currThread.getStackTrace(); String caller = stackTrace[stackTrace.length - 1].getClassName(); - String previousCaller = (String) authStateClassCallHistory.pop(); + String previousCaller; + try { + previousCaller = (String) authStateClassCallHistory.pop(); + } catch (NoSuchElementException ex) { + previousCaller = "none"; + log.warn(LogHelper.getHeader(this, "restore_auth_sys_state", + "no previous caller info available: {}"), + ex::getLocalizedMessage); + } // if previousCaller is not the current caller *only* log a warning if (!previousCaller.equals(caller)) { - log - .warn(LogManager - .getHeader( + 
log.warn(LogHelper.getHeader( this, "restore_auth_sys_state", "Class: " @@ -338,7 +354,7 @@ public void restoreAuthSystemState() { + previousCaller)); } } - ignoreAuth = previousState.booleanValue(); + ignoreAuth = previousState; } /** @@ -490,7 +506,7 @@ public void addEvent(Event event) { throw new IllegalStateException("Attempt to mutate object in read-only context"); } if (events == null) { - events = new LinkedList(); + events = new LinkedList<>(); } events.add(event); @@ -527,6 +543,36 @@ public Event pollEvent() { } } + /** + * Rollback the current transaction with the database, without persisting any + * pending changes. The database connection is not closed and can be reused + * afterwards. + * + * WARNING: After calling this method all previously fetched entities are + * "detached" (pending changes are not tracked anymore). You have to reload all + * entities you still want to work with manually after this method call (see + * {@link Context#reloadEntity(ReloadableEntity)}). + * + * @throws SQLException When rollbacking the transaction in the database fails. + */ + public void rollback() throws SQLException { + // If Context is no longer open/valid, just note that it has already been closed + if (!isValid()) { + log.info("rollback() was called on a closed Context object. No changes to abort."); + return; + } + + try { + // Rollback ONLY if we have a database transaction, and it is NOT Read Only + if (!isReadOnly() && isTransactionAlive()) { + dbConnection.rollback(); + reloadContextBoundEntities(); + } + } finally { + events = null; + } + } + /** * Close the context, without committing any of the changes performed using * this context. The database connection is freed. 
No exception is thrown if @@ -628,11 +674,7 @@ public void setSpecialGroup(UUID groupID) { * @return true if member */ public boolean inSpecialGroup(UUID groupID) { - if (specialGroups.contains(groupID)) { - return true; - } - - return false; + return specialGroups.contains(groupID); } /** @@ -642,7 +684,7 @@ public boolean inSpecialGroup(UUID groupID) { * @throws SQLException if database error */ public List getSpecialGroups() throws SQLException { - List myGroups = new ArrayList(); + List myGroups = new ArrayList<>(); for (UUID groupId : specialGroups) { myGroups.add(EPersonServiceFactory.getInstance().getGroupService().find(this, groupId)); } @@ -650,6 +692,15 @@ public List getSpecialGroups() throws SQLException { return myGroups; } + /** + * Get a set of all of the special groups uuids that current user is a member of. + * + * @return list of special groups uuids + */ + public Set getSpecialGroupUuids() { + return CollectionUtils.isEmpty(specialGroups) ? Set.of() : specialGroups; + } + /** * Temporary change the user bound to the context, empty the special groups that * are retained to allow subsequent restore @@ -667,12 +718,12 @@ public void switchContextUser(EPerson newUser) { currentUserPreviousState = currentUser; specialGroupsPreviousState = specialGroups; - specialGroups = new ArrayList(); + specialGroups = new HashSet<>(); currentUser = newUser; } /** - * Restore the user bound to the context and his special groups + * Restore the user bound to the context and their special groups * * @throws IllegalStateException if no switch was performed before */ @@ -709,11 +760,13 @@ public void shutDownDatabase() throws SQLException { /** - * Returns the size of the cache of all object that have been read from the database so far. A larger number - * means that more memory is consumed by the cache. This also has a negative impact on the query performance. 
In - * that case you should consider uncaching entities when they are no longer needed (see - * {@link Context#uncacheEntity(ReloadableEntity)} () uncacheEntity}). + * Returns the size of the cache of all object that have been read from the + * database so far. A larger number means that more memory is consumed by + * the cache. This also has a negative impact on the query performance. In + * that case you should consider uncaching entities when they are no longer + * needed (see {@link Context#uncacheEntity(ReloadableEntity)} () uncacheEntity}). * + * @return cache size. * @throws SQLException When connecting to the active cache fails. */ public long getCacheSize() throws SQLException { @@ -749,7 +802,7 @@ public void setMode(Mode newMode) { dbConnection.setConnectionMode(false, false); break; default: - log.warn("New context mode detected that has nog been configured."); + log.warn("New context mode detected that has not been configured."); break; } } catch (SQLException ex) { @@ -762,6 +815,15 @@ public void setMode(Mode newMode) { readOnlyCache.clear(); } + // When going to READ_ONLY, flush database changes to ensure that the current data is retrieved + if (newMode == Mode.READ_ONLY && mode != Mode.READ_ONLY) { + try { + dbConnection.flushSession(); + } catch (SQLException ex) { + log.warn("Unable to flush database changes after switching to READ_ONLY mode", ex); + } + } + //save the new mode mode = newMode; } @@ -772,7 +834,7 @@ public void setMode(Mode newMode) { * @return The current mode */ public Mode getCurrentMode() { - return mode; + return mode != null ? mode : Mode.READ_WRITE; } /** @@ -811,7 +873,7 @@ public boolean isBatchModeEnabled() { * entity. This means changes to the entity will be tracked and persisted to the database. * * @param entity The entity to reload - * @param The class of the enity. The entity must implement the {@link ReloadableEntity} interface. + * @param The class of the entity. 
The entity must implement the {@link ReloadableEntity} interface. * @return A (possibly) NEW reference to the entity that should be used for further processing. * @throws SQLException When reloading the entity from the database fails. */ @@ -824,7 +886,7 @@ public E reloadEntity(E entity) throws SQLException * Remove an entity from the cache. This is necessary when batch processing a large number of items. * * @param entity The entity to reload - * @param The class of the enity. The entity must implement the {@link ReloadableEntity} interface. + * @param The class of the entity. The entity must implement the {@link ReloadableEntity} interface. * @throws SQLException When reloading the entity from the database fails. */ @SuppressWarnings("unchecked") @@ -889,4 +951,29 @@ private void reloadContextBoundEntities() throws SQLException { currentUser = reloadEntity(currentUser); } + public String getAuthenticationMethod() { + return authenticationMethod; + } + + public void setAuthenticationMethod(final String authenticationMethod) { + this.authenticationMethod = authenticationMethod; + } + + /** + * Check if the user of the context is switched. + */ + public boolean isContextUserSwitched() { + return currentUserPreviousState != null; + } + + /** + * Returns the default "Administrator" group for DSpace administrators. + * The result is cached in the 'adminGroup' field, so it is only looked up once. + * This is done to improve performance, as this method is called quite often. + */ + public Group getAdminGroup() throws SQLException { + return (adminGroup == null) ? 
EPersonServiceFactory.getInstance() + .getGroupService() + .findByName(this, Group.ADMIN) : adminGroup; + } } diff --git a/dspace-api/src/main/java/org/dspace/core/DBConnection.java b/dspace-api/src/main/java/org/dspace/core/DBConnection.java index cb5825eec1d9..66e4a65dbfe1 100644 --- a/dspace-api/src/main/java/org/dspace/core/DBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/DBConnection.java @@ -148,4 +148,12 @@ public interface DBConnection { * @throws java.sql.SQLException passed through. */ public void uncacheEntity(E entity) throws SQLException; + + /** + * Do a manual flush. This synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. + */ + public void flushSession() throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/core/Email.java b/dspace-api/src/main/java/org/dspace/core/Email.java index 803497b650dc..f6df740a53ef 100644 --- a/dspace-api/src/main/java/org/dspace/core/Email.java +++ b/dspace-api/src/main/java/org/dspace/core/Email.java @@ -21,7 +21,6 @@ import java.util.Collections; import java.util.Date; import java.util.Enumeration; -import java.util.Iterator; import java.util.List; import java.util.Properties; import javax.activation.DataHandler; @@ -41,39 +40,55 @@ import javax.mail.internet.MimeMultipart; import javax.mail.internet.ParseException; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.velocity.Template; import org.apache.velocity.VelocityContext; import org.apache.velocity.app.Velocity; import org.apache.velocity.app.VelocityEngine; +import org.apache.velocity.exception.MethodInvocationException; +import org.apache.velocity.exception.ParseErrorException; +import org.apache.velocity.exception.ResourceNotFoundException; import org.apache.velocity.runtime.resource.loader.StringResourceLoader; import 
org.apache.velocity.runtime.resource.util.StringResourceRepository; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; /** - * Class representing an e-mail message, also used to send e-mails. + * Class representing an e-mail message. The {@link send} method causes the + * assembled message to be formatted and sent. *

    * Typical use: - *

    + *
    + * Email email = Email.getEmail(path);
    + * email.addRecipient("foo@bar.com");
    + * email.addArgument("John");
    + * email.addArgument("On the Testing of DSpace");
    + * email.send();
    + * 
    + * {@code path} is the filesystem path of an email template, typically in + * {@code ${dspace.dir}/config/emails/} and can include the subject -- see + * below. Templates are processed by + * Apache Velocity. They may contain VTL directives and property + * placeholders. + *

    + * {@link #addArgument(Object)} adds a property to the {@code params} array + * in the Velocity context, which can be used to replace placeholder tokens + * in the message. These arguments are indexed by number in the order they were + * added to the message. *

    - * Email email = new Email();
    - * email.addRecipient("foo@bar.com");
    - * email.addArgument("John");
    - * email.addArgument("On the Testing of DSpace");
    - * email.send();
    - *

    + * The DSpace configuration properties are also available to templates as the + * array {@code config}, indexed by name. Example: {@code ${config.get('dspace.name')}} *

    - * name is the name of an email template in - * dspace-dir/config/emails/ (which also includes the subject.) - * arg0 and arg1 are arguments to fill out the - * message with. - *

    - * Emails are formatted using Apache Velocity. Headers such as Subject may be - * supplied by the template, by defining them using #set(). Example: - *

    + * Recipients and attachments may be added as needed. See {@link #addRecipient}, + * {@link #addAttachment(File, String)}, and + * {@link #addAttachment(InputStream, String, String)}. *

    + * Headers such as Subject may be supplied by the template, by defining them + * using the VTL directive {@code #set()}. Only headers named in the DSpace + * configuration array property {@code mail.message.headers} will be added. + *

    + * Example: * *

      *
    @@ -88,12 +103,14 @@
      *
      *     Thank you for sending us your submission "${params[1]}".
      *
    + *     --
    + *     The ${config.get('dspace.name')} Team
    + *
      * 
    * *

    * If the example code above was used to send this mail, the resulting mail * would have the subject Example e-mail and the body would be: - *

    * *
      *
    @@ -102,7 +119,16 @@
      *
      *     Thank you for sending us your submission "On the Testing of DSpace".
      *
    + *     --
    + *     The DSpace Team
    + *
      * 
    + *

    + * There are two ways to load a message body. One can create an instance of + * {@link Email} and call {@link setContent} on it, passing the body as a String. Or + * one can use the static factory method {@link getEmail} to load a file by its + * complete filesystem path. In either case the text will be loaded into a + * Velocity template. * * @author Robert Tansley * @author Jim Downing - added attachment handling code @@ -112,7 +138,6 @@ public class Email { /** * The content of the message */ - private String content; private String contentName; /** @@ -149,14 +174,14 @@ public class Email { private static final String RESOURCE_REPOSITORY_NAME = "Email"; private static final Properties VELOCITY_PROPERTIES = new Properties(); static { - VELOCITY_PROPERTIES.put(Velocity.RESOURCE_LOADER, "string"); - VELOCITY_PROPERTIES.put("string.resource.loader.description", + VELOCITY_PROPERTIES.put(Velocity.RESOURCE_LOADERS, "string"); + VELOCITY_PROPERTIES.put("resource.loader.string.description", "Velocity StringResource loader"); - VELOCITY_PROPERTIES.put("string.resource.loader.class", + VELOCITY_PROPERTIES.put("resource.loader.string.class", StringResourceLoader.class.getName()); - VELOCITY_PROPERTIES.put("string.resource.loader.repository.name", + VELOCITY_PROPERTIES.put("resource.loader.string.repository.name", RESOURCE_REPOSITORY_NAME); - VELOCITY_PROPERTIES.put("string.resource.loader.repository.static", + VELOCITY_PROPERTIES.put("resource.loader.string.repository.static", "false"); } @@ -173,13 +198,12 @@ public Email() { moreAttachments = new ArrayList<>(10); subject = ""; template = null; - content = ""; replyTo = null; charset = null; } /** - * Add a recipient + * Add a recipient. * * @param email the recipient's email address */ @@ -193,16 +217,24 @@ public void addRecipient(String email) { * "Subject:" line must be stripped. 
* * @param name a name for this message body - * @param cnt the content of the message + * @param content the content of the message */ - public void setContent(String name, String cnt) { - content = cnt; + public void setContent(String name, String content) { contentName = name; arguments.clear(); + + VelocityEngine templateEngine = new VelocityEngine(); + templateEngine.init(VELOCITY_PROPERTIES); + + StringResourceRepository repo = (StringResourceRepository) + templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); + repo.putStringResource(contentName, content); + // Turn content into a template. + template = templateEngine.getTemplate(contentName); } /** - * Set the subject of the message + * Set the subject of the message. * * @param s the subject of the message */ @@ -211,7 +243,7 @@ public void setSubject(String s) { } /** - * Set the reply-to email address + * Set the reply-to email address. * * @param email the reply-to email address */ @@ -220,7 +252,7 @@ public void setReplyTo(String email) { } /** - * Fill out the next argument in the template + * Fill out the next argument in the template. * * @param arg the value for the next argument */ @@ -228,6 +260,13 @@ public void addArgument(Object arg) { arguments.add(arg); } + /** + * Add an attachment bodypart to the message from an external file. + * + * @param f reference to a file to be attached. + * @param name a name for the resulting bodypart in the message's MIME + * structure. + */ public void addAttachment(File f, String name) { attachments.add(new FileAttachment(f, name)); } @@ -235,6 +274,17 @@ public void addAttachment(File f, String name) { /** When given a bad MIME type for an attachment, use this instead. */ private static final String DEFAULT_ATTACHMENT_TYPE = "application/octet-stream"; + /** + * Add an attachment bodypart to the message from a byte stream. + * + * @param is the content of this stream will become the content of the + * bodypart. 
+ * @param name a name for the resulting bodypart in the message's MIME + * structure. + * @param mimetype the MIME type of the resulting bodypart, such as + * "text/pdf". If {@code null} it will default to + * "application/octet-stream", which is MIME for "unknown format". + */ public void addAttachment(InputStream is, String name, String mimetype) { if (null == mimetype) { LOG.error("Null MIME type replaced with '" + DEFAULT_ATTACHMENT_TYPE @@ -254,6 +304,11 @@ public void addAttachment(InputStream is, String name, String mimetype) { moreAttachments.add(new InputStreamAttachment(is, name, mimetype)); } + /** + * Set the character set of the message. + * + * @param cs the name of a character set, such as "UTF-8" or "EUC-JP". + */ public void setCharset(String cs) { charset = cs; } @@ -277,15 +332,20 @@ public void reset() { * {@code mail.message.headers} then that name and its value will be added * to the message's headers. * - *

    "subject" is treated specially: if {@link setSubject()} has not been called, - * the value of any "subject" property will be used as if setSubject had - * been called with that value. Thus a template may define its subject, but - * the caller may override it. + *

    "subject" is treated specially: if {@link setSubject()} has not been + * called, the value of any "subject" property will be used as if setSubject + * had been called with that value. Thus a template may define its subject, + * but the caller may override it. * * @throws MessagingException if there was a problem sending the mail. * @throws IOException if IO error */ public void send() throws MessagingException, IOException { + if (null == template) { + // No template -- no content -- PANIC!!! + throw new MessagingException("Email has no body"); + } + ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -305,36 +365,26 @@ public void send() throws MessagingException, IOException { MimeMessage message = new MimeMessage(session); // Set the recipients of the message - Iterator i = recipients.iterator(); - - while (i.hasNext()) { - message.addRecipient(Message.RecipientType.TO, new InternetAddress( - i.next())); + for (String recipient : recipients) { + message.addRecipient(Message.RecipientType.TO, + new InternetAddress(recipient)); } + // Get headers defined by the template. + String[] templateHeaders = config.getArrayProperty("mail.message.headers"); // Format the mail message body - VelocityEngine templateEngine = new VelocityEngine(); - templateEngine.init(VELOCITY_PROPERTIES); - VelocityContext vctx = new VelocityContext(); vctx.put("config", new UnmodifiableConfigurationService(config)); vctx.put("params", Collections.unmodifiableList(arguments)); - if (null == template) { - if (StringUtils.isBlank(content)) { - // No template and no content -- PANIC!!! - throw new MessagingException("Email has no body"); - } - // No template, so use a String of content. - StringResourceRepository repo = (StringResourceRepository) - templateEngine.getApplicationAttribute(RESOURCE_REPOSITORY_NAME); - repo.putStringResource(contentName, content); - // Turn content into a template. 
- template = templateEngine.getTemplate(contentName); - } - StringWriter writer = new StringWriter(); - template.merge(vctx, writer); + try { + template.merge(vctx, writer); + } catch (MethodInvocationException | ParseErrorException + | ResourceNotFoundException ex) { + LOG.error("Template not merged: {}", ex.getMessage()); + throw new MessagingException("Template not merged", ex); + } String fullMessage = writer.toString(); // Set some message header fields @@ -342,11 +392,10 @@ public void send() throws MessagingException, IOException { message.setSentDate(date); message.setFrom(new InternetAddress(from)); - // Get headers defined by the template. - for (String headerName : config.getArrayProperty("mail.message.headers")) { + for (String headerName : templateHeaders) { String headerValue = (String) vctx.get(headerName); if ("subject".equalsIgnoreCase(headerName)) { - if (null != subject) { + if (null != headerValue) { subject = headerValue; } } else if ("charset".equalsIgnoreCase(headerName)) { @@ -394,7 +443,8 @@ public void send() throws MessagingException, IOException { // add the stream messageBodyPart = new MimeBodyPart(); messageBodyPart.setDataHandler(new DataHandler( - new InputStreamDataSource(attachment.name,attachment.mimetype,attachment.is))); + new InputStreamDataSource(attachment.name, + attachment.mimetype, attachment.is))); messageBodyPart.setFileName(attachment.name); multipart.addBodyPart(messageBodyPart); } @@ -436,6 +486,9 @@ public void send() throws MessagingException, IOException { /** * Get the VTL template for an email message. The message is suitable * for inserting values using Apache Velocity. + *

    + * Note that everything is stored here, so that only send() throws a + * MessagingException. * * @param emailFile * full name for the email template, for example "/dspace/config/emails/register". @@ -473,15 +526,6 @@ public static Email getEmail(String emailFile) } return email; } - /* - * Implementation note: It might be necessary to add a quick utility method - * like "send(to, subject, message)". We'll see how far we get without it - - * having all emails as templates in the config allows customisation and - * internationalisation. - * - * Note that everything is stored and the run in send() so that only send() - * throws a MessagingException. - */ /** * Test method to send an email to check email server settings @@ -536,7 +580,7 @@ public static void main(String[] args) { } /** - * Utility struct class for handling file attachments. + * Utility record class for handling file attachments. * * @author ojd20 */ @@ -552,7 +596,7 @@ public FileAttachment(File f, String n) { } /** - * Utility struct class for handling file attachments. + * Utility record class for handling file attachments. * * @author Adán Román Ruiz at arvo.es */ @@ -569,13 +613,23 @@ public InputStreamAttachment(InputStream is, String name, String mimetype) { } /** + * Wrap an {@link InputStream} in a {@link DataSource}. + * * @author arnaldo */ - public class InputStreamDataSource implements DataSource { + public static class InputStreamDataSource implements DataSource { private final String name; private final String contentType; private final ByteArrayOutputStream baos; + /** + * Consume the content of an InputStream and store it in a local buffer. + * + * @param name give the DataSource a name. + * @param contentType the DataSource contains this type of data. + * @param inputStream content to be buffered in the DataSource. + * @throws IOException if the stream cannot be read. 
+ */ InputStreamDataSource(String name, String contentType, InputStream inputStream) throws IOException { this.name = name; this.contentType = contentType; @@ -612,7 +666,7 @@ public OutputStream getOutputStream() throws IOException { * Wrap ConfigurationService to prevent templates from modifying * the configuration. */ - public class UnmodifiableConfigurationService { + public static class UnmodifiableConfigurationService { private final ConfigurationService configurationService; /** diff --git a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java index 3321e4d837e5..b371af80eede 100644 --- a/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java +++ b/dspace-api/src/main/java/org/dspace/core/HibernateDBConnection.java @@ -337,4 +337,17 @@ public void uncacheEntity(E entity) throws SQLExcep } } } + + /** + * Do a manual flush. This synchronizes the in-memory state of the Session + * with the database (write changes to the database) + * + * @throws SQLException passed through. 
+ */ + @Override + public void flushSession() throws SQLException { + if (getSession().isDirty()) { + getSession().flush(); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java index cd0609e29f14..0fc48b908b82 100644 --- a/dspace-api/src/main/java/org/dspace/core/I18nUtil.java +++ b/dspace-api/src/main/java/org/dspace/core/I18nUtil.java @@ -217,12 +217,11 @@ public static Locale getSupportedLocale(String locale) { */ public static String getInputFormsFileName(Locale locale) { /** Name of the form definition XML file */ - String fileName = ""; final String FORM_DEF_FILE = "submission-forms"; final String FILE_TYPE = ".xml"; String defsFilename = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") + File.separator + "config" + File.separator + FORM_DEF_FILE; - fileName = getFilename(locale, defsFilename, FILE_TYPE); + String fileName = getFilename(locale, defsFilename, FILE_TYPE); return fileName; } @@ -286,14 +285,13 @@ public static String getMessage(String key, Context c) { */ public static String getDefaultLicense(Context context) { Locale locale = context.getCurrentLocale(); - String fileName = ""; /** Name of the default license */ final String DEF_LIC_FILE = "default"; final String FILE_TYPE = ".license"; String defsFilename = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") + File.separator + "config" + File.separator + DEF_LIC_FILE; - fileName = getFilename(locale, defsFilename, FILE_TYPE); + String fileName = getFilename(locale, defsFilename, FILE_TYPE); return fileName; } @@ -316,18 +314,17 @@ private static String getFilename(Locale locale, String fileName, String fileTyp // with Language, Country String fileNameLC = null; // with Language - String fileNameL = null; - fileNameL = fileName + "_" + locale.getLanguage(); + String fileNameL = fileName + "_" + locale.getLanguage(); if (fileType == null) { 
fileType = ""; } - if (!("".equals(locale.getCountry()))) { + if (!locale.getCountry().isEmpty()) { fileNameLC = fileName + "_" + locale.getLanguage() + "_" + locale.getCountry(); - if (!("".equals(locale.getVariant()))) { + if (!locale.getVariant().isEmpty()) { fileNameLCV = fileName + "_" + locale.getLanguage() + "_" + locale.getCountry() + "_" + locale.getVariant(); } @@ -349,7 +346,7 @@ private static String getFilename(Locale locale, String fileName, String fileTyp } } - if (fileNameL != null && !fileFound) { + if (!fileFound) { File fileTmp = new File(fileNameL + fileType); if (fileTmp.exists()) { fileFound = true; @@ -372,12 +369,11 @@ private static String getFilename(Locale locale, String fileName, String fileTyp * String - localized filename of an email template */ public static String getEmailFilename(Locale locale, String name) { - String templateName = ""; String templateFile = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") + File.separator + "config" + File.separator + "emails" + File.separator + name; - templateName = getFilename(locale, templateFile, ""); + String templateName = getFilename(locale, templateFile, ""); return templateName; } @@ -389,7 +385,7 @@ public static String getEmailFilename(Locale locale, String name) { * @return array of locale results, possibly empty */ public static Locale[] parseLocales(String[] locales) { - List resultList = new ArrayList(); + List resultList = new ArrayList<>(); for (String ls : locales) { Locale lc = makeLocale(ls); if (lc != null) { diff --git a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java index 7bbbd91d0aad..e92ea137f31f 100644 --- a/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LegacyPluginServiceImpl.java @@ -10,7 +10,6 @@ import java.io.BufferedReader; import java.io.FileReader; import 
java.io.IOException; -import java.io.Serializable; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; @@ -173,7 +172,7 @@ public Object[] getPluginSequence(Class interfaceClass) throws PluginInstantiationException { // cache of config data for Sequence Plugins; format its // -> [ .. ] (value is Array) - Map sequenceConfig = new HashMap(); + Map sequenceConfig = new HashMap<>(); // cache the configuration for this interface after grovelling it once: // format is prefix. = @@ -220,10 +219,7 @@ private Object getAnonymousPlugin(String classname) // Map of named plugin classes, [intfc,name] -> class // Also contains intfc -> "marker" to mark when interface has been loaded. - private Map namedPluginClasses = new HashMap(); - - // Map of cached (reusable) named plugin instances, [class,name] -> instance - private Map namedInstanceCache = new HashMap(); + private final Map namedPluginClasses = new HashMap<>(); // load and cache configuration data for the given interface. 
private void configureNamedPlugin(String iname) @@ -413,14 +409,14 @@ public String[] getAllPluginNames(Class interfaceClass) { String iname = interfaceClass.getName(); configureNamedPlugin(iname); String prefix = iname + SEP; - ArrayList result = new ArrayList(); + ArrayList result = new ArrayList<>(); for (String key : namedPluginClasses.keySet()) { if (key.startsWith(prefix)) { result.add(key.substring(prefix.length())); } } - if (result.size() == 0) { + if (result.isEmpty()) { log.error("Cannot find any names for named plugin, interface=" + iname); } @@ -508,10 +504,10 @@ public void checkConfiguration() */ // tables of config keys for each type of config line: - Map singleKey = new HashMap(); - Map sequenceKey = new HashMap(); - Map namedKey = new HashMap(); - Map selfnamedKey = new HashMap(); + Map singleKey = new HashMap<>(); + Map sequenceKey = new HashMap<>(); + Map namedKey = new HashMap<>(); + Map selfnamedKey = new HashMap<>(); // Find all property keys starting with "plugin." List keys = configurationService.getPropertyKeys("plugin."); @@ -533,7 +529,7 @@ public void checkConfiguration() // 2. Build up list of all interfaces and test that they are loadable. // don't bother testing that they are "interface" rather than "class" // since either one will work for the Plugin Manager. - ArrayList allInterfaces = new ArrayList(); + ArrayList allInterfaces = new ArrayList<>(); allInterfaces.addAll(singleKey.keySet()); allInterfaces.addAll(sequenceKey.keySet()); allInterfaces.addAll(namedKey.keySet()); @@ -547,7 +543,6 @@ public void checkConfiguration() // - each class is loadable. // - plugin.selfnamed values are each subclass of SelfNamedPlugin // - save classname in allImpls - Map allImpls = new HashMap(); // single plugins - just check that it has a valid impl. 
class ii = singleKey.keySet().iterator(); @@ -558,9 +553,6 @@ public void checkConfiguration() log.error("Single plugin config not found for: " + SINGLE_PREFIX + key); } else { val = val.trim(); - if (checkClassname(val, "implementation class")) { - allImpls.put(val, val); - } } } @@ -571,12 +563,6 @@ public void checkConfiguration() String[] vals = configurationService.getArrayProperty(SEQUENCE_PREFIX + key); if (vals == null || vals.length == 0) { log.error("Sequence plugin config not found for: " + SEQUENCE_PREFIX + key); - } else { - for (String val : vals) { - if (checkClassname(val, "implementation class")) { - allImpls.put(val, val); - } - } } } @@ -591,7 +577,6 @@ public void checkConfiguration() } else { for (String val : vals) { if (checkClassname(val, "selfnamed implementation class")) { - allImpls.put(val, val); checkSelfNamed(val); } } @@ -609,15 +594,6 @@ public void checkConfiguration() log.error("Named plugin config not found for: " + NAMED_PREFIX + key); } else { checkNames(key); - for (String val : vals) { - // each named plugin has two parts to the value, format: - // [classname] = [plugin-name] - String val_split[] = val.split("\\s*=\\s*"); - String classname = val_split[0]; - if (checkClassname(classname, "implementation class")) { - allImpls.put(classname, classname); - } - } } } } diff --git a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java index 8324105a3085..d895f9a76481 100644 --- a/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/core/LicenseServiceImpl.java @@ -17,9 +17,12 @@ import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; +import javax.servlet.http.HttpServletRequest; import org.dspace.core.service.LicenseService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.services.model.Request; +import org.dspace.web.ContextUtil; 
import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,13 +104,14 @@ public String getLicenseText(String licenseFile) { /** * Get the site-wide default license that submitters need to grant * + * Localized license requires: default_{{locale}}.license file. + * Locale also must be listed in webui.supported.locales setting. + * * @return the default license */ @Override public String getDefaultSubmissionLicense() { - if (null == license) { - init(); - } + init(); return license; } @@ -115,9 +119,8 @@ public String getDefaultSubmissionLicense() { * Load in the default license. */ protected void init() { - File licenseFile = new File( - DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("dspace.dir") - + File.separator + "config" + File.separator + "default.license"); + Context context = obtainContext(); + File licenseFile = new File(I18nUtil.getDefaultLicense(context)); FileInputStream fir = null; InputStreamReader ir = null; @@ -169,4 +172,24 @@ protected void init() { } } } + + /** + * Obtaining current request context. + * Return new context if getting one from current request failed. 
+ * + * @return DSpace context object + */ + private Context obtainContext() { + try { + Request currentRequest = DSpaceServicesFactory.getInstance().getRequestService().getCurrentRequest(); + if (currentRequest != null) { + HttpServletRequest request = currentRequest.getHttpServletRequest(); + return ContextUtil.obtainContext(request); + } + } catch (Exception e) { + log.error("Can't load current request context."); + } + + return new Context(); + } } diff --git a/dspace-api/src/main/java/org/dspace/core/LogHelper.java b/dspace-api/src/main/java/org/dspace/core/LogHelper.java new file mode 100644 index 000000000000..00cc0f27664b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/core/LogHelper.java @@ -0,0 +1,93 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import org.dspace.eperson.EPerson; + +/** + * Class for generating standard log header + * + * @author David Stuve + * @author Robert Tansley + * @version $Revision$ + */ +public class LogHelper { + + /** + * Default constructor + */ + private LogHelper() { } + + /** + * Generate the log header + * + * @param context the current Context - safe to pass in null + * @param action string describing the action + * @param extrainfo string with extra information, like parameters + * @return the filled out log header + */ + public static String getHeader(Context context, String action, + String extrainfo) { + String email = "anonymous"; + String contextExtraInfo; + + if (context != null) { + EPerson e = context.getCurrentUser(); + + if (e != null) { + email = e.getEmail(); + } + + contextExtraInfo = context.getExtraLogInfo(); + } else { + contextExtraInfo = "no_context"; + } + + + StringBuilder result = new StringBuilder(); + // Escape everthing but the extra context info because for some crazy reason two 
fields + // are generated inside this entry one for the session id, and another for the ip + // address. Everything else should be escaped. + result.append(escapeLogField(email)).append(":").append(contextExtraInfo).append(":") + .append(escapeLogField(action)).append(":").append(escapeLogField(extrainfo)); + return result.toString(); + } + + + /** + * If any string within the log line contains a field separator (:) they need to be escaped so as the + * line may be parsed and analysed later. This method will escape a log field. + * + * Single slashes and colons will be escaped so that colons no longer appear in the logs + * + * @param field The unescaped log field + * @return An escaped log field + */ + public static String escapeLogField(String field) { + if (field != null) { + field = field.replaceAll("\\\\", "\\\\\\\\;"); + field = field.replaceAll(":", "\\\\colon;"); + } + return field; + } + + /** + * Unescape a log field. + * + * @param field The escaped log field + * @return the original log field + */ + public static String unescapeLogField(String field) { + + if (field != null) { + field = field.replaceAll("\\\\colon;", ":"); + field = field.replaceAll("\\\\\\\\;", "\\\\"); + } + return field; + } +} diff --git a/dspace-api/src/main/java/org/dspace/core/LogManager.java b/dspace-api/src/main/java/org/dspace/core/LogManager.java deleted file mode 100644 index c8988ca997b4..000000000000 --- a/dspace-api/src/main/java/org/dspace/core/LogManager.java +++ /dev/null @@ -1,93 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.core; - -import org.dspace.eperson.EPerson; - -/** - * Class for generating standard log header - * - * @author David Stuve - * @author Robert Tansley - * @version $Revision$ - */ -public class LogManager { - - /** - * Default constructor - */ - 
private LogManager() { } - - /** - * Generate the log header - * - * @param context the current Context - safe to pass in null - * @param action string describing the action - * @param extrainfo string with extra information, like parameters - * @return the filled out log header - */ - public static String getHeader(Context context, String action, - String extrainfo) { - String email = "anonymous"; - String contextExtraInfo; - - if (context != null) { - EPerson e = context.getCurrentUser(); - - if (e != null) { - email = e.getEmail(); - } - - contextExtraInfo = context.getExtraLogInfo(); - } else { - contextExtraInfo = "no_context"; - } - - - StringBuilder result = new StringBuilder(); - // Escape everthing but the extra context info because for some crazy reason two fields - // are generated inside this entry one for the session id, and another for the ip - // address. Everything else should be escaped. - result.append(escapeLogField(email)).append(":").append(contextExtraInfo).append(":") - .append(escapeLogField(action)).append(":").append(escapeLogField(extrainfo)); - return result.toString(); - } - - - /** - * If any string within the log line contains a field separator (:) they need to be escaped so as the - * line may be parsed and analysed later. This method will escape a log field. - * - * Single slashes and colons will be escaped so that colons no longer appear in the logs - * - * @param field The unescaped log field - * @return An escaped log field - */ - public static String escapeLogField(String field) { - if (field != null) { - field = field.replaceAll("\\\\", "\\\\\\\\;"); - field = field.replaceAll(":", "\\\\colon;"); - } - return field; - } - - /** - * Unescape a log field. 
- * - * @param field The escaped log field - * @return the original log field - */ - public static String unescapeLogField(String field) { - - if (field != null) { - field = field.replaceAll("\\\\colon;", ":"); - field = field.replaceAll("\\\\\\\\;", "\\\\"); - } - return field; - } -} diff --git a/dspace-api/src/main/java/org/dspace/core/Utils.java b/dspace-api/src/main/java/org/dspace/core/Utils.java index cb8714b80b2f..ea9ed57eca04 100644 --- a/dspace-api/src/main/java/org/dspace/core/Utils.java +++ b/dspace-api/src/main/java/org/dspace/core/Utils.java @@ -16,10 +16,9 @@ import java.net.Inet4Address; import java.net.InetAddress; import java.net.MalformedURLException; -import java.net.URI; -import java.net.URISyntaxException; import java.net.URL; import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; import java.rmi.dgc.VMID; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -38,6 +37,7 @@ import com.coverity.security.Escape; import org.apache.commons.lang3.StringUtils; import org.apache.commons.text.StringSubstitutor; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -46,13 +46,12 @@ * Utility functions for DSpace. 
* * @author Peter Breton - * @version $Revision$ */ public final class Utils { /** * log4j logger */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Utils.class); + private static final Logger log = LogManager.getLogger(Utils.class); private static final Pattern DURATION_PATTERN = Pattern .compile("(\\d+)([smhdwy])"); @@ -71,12 +70,12 @@ public final class Utils { private static int counter = 0; - private static Random random = new Random(); + private static final Random random = new Random(); - private static VMID vmid = new VMID(); + private static final VMID vmid = new VMID(); // for parseISO8601Date - private static SimpleDateFormat parseFmt[] = { + private static final SimpleDateFormat[] parseFmt = { // first try at parsing, has milliseconds (note General time zone) new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSz"), @@ -86,17 +85,21 @@ public final class Utils { // finally, try without any timezone (defaults to current TZ) new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSS"), - new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss") + new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss"), + + new SimpleDateFormat("yyyy'-'MM'-'dd") }; // for formatISO8601Date // output canonical format (note RFC22 time zone, easier to hack) - private static SimpleDateFormat outFmtSecond = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssZ"); + private static final SimpleDateFormat outFmtSecond + = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssZ"); // output format with millsecond precision - private static SimpleDateFormat outFmtMillisec = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSZ"); + private static final SimpleDateFormat outFmtMillisec + = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSZ"); - private static Calendar outCal = GregorianCalendar.getInstance(); + private static final Calendar outCal = GregorianCalendar.getInstance(); /** * Private constructor @@ -110,7 +113,7 @@ private Utils() { } * @return 
MD5 checksum for the data in hex format. */ public static String getMD5(String data) { - return getMD5(data.getBytes()); + return getMD5(data.getBytes(StandardCharsets.UTF_8)); } /** @@ -153,14 +156,14 @@ public static String toHex(byte[] data) { return null; } - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); // This is far from the most efficient way to do things... - for (int i = 0; i < data.length; i++) { - int low = (int) (data[i] & 0x0F); - int high = (int) (data[i] & 0xF0); + for (byte datum : data) { + int low = datum & 0x0F; + int high = datum & 0xF0; - result.append(Integer.toHexString(high).substring(0, 1)); + result.append(Integer.toHexString(high).charAt(0)); result.append(Integer.toHexString(low)); } @@ -196,11 +199,9 @@ public static synchronized byte[] generateBytesKey() { byte[] junk = new byte[16]; random.nextBytes(junk); + String input = String.valueOf(vmid) + new Date() + Arrays.toString(junk) + counter++; - String input = new StringBuffer().append(vmid).append( - new java.util.Date()).append(Arrays.toString(junk)).append(counter++).toString(); - - return getMD5Bytes(input.getBytes()); + return getMD5Bytes(input.getBytes(StandardCharsets.UTF_8)); } // The following two methods are taken from the Jakarta IOUtil class. 
@@ -287,7 +288,7 @@ public static long parseDuration(String duration) throws ParseException { } String units = m.group(2); - long multiplier = MS_IN_SECOND; + long multiplier; if ("s".equals(units)) { multiplier = MS_IN_SECOND; @@ -327,16 +328,16 @@ public static synchronized Date parseISO8601Date(String s) { char tzSign = s.charAt(s.length() - 6); if (s.endsWith("Z")) { s = s.substring(0, s.length() - 1) + "GMT+00:00"; - } else if (tzSign == '-' || tzSign == '+') { + } else if ((tzSign == '-' || tzSign == '+') && s.length() > 10) { // check for trailing timezone s = s.substring(0, s.length() - 6) + "GMT" + s.substring(s.length() - 6); } // try to parse without milliseconds ParseException lastError = null; - for (int i = 0; i < parseFmt.length; ++i) { + for (SimpleDateFormat simpleDateFormat : parseFmt) { try { - return parseFmt[i].parse(s); + return simpleDateFormat.parse(s); } catch (ParseException e) { lastError = e; } @@ -369,7 +370,7 @@ public static synchronized String formatISO8601Date(Date d) { } public static java.util.Collection emptyIfNull(java.util.Collection collection) { - return collection == null ? Collections.emptyList() : collection; + return collection == null ? Collections.emptyList() : collection; } /** @@ -412,7 +413,9 @@ public static String[] tokenize(String metadata) { * @return metadata field key */ public static String standardize(String schema, String element, String qualifier, String separator) { - if (StringUtils.isBlank(qualifier)) { + if (StringUtils.isBlank(element)) { + return null; + } else if (StringUtils.isBlank(qualifier)) { return schema + separator + element; } else { return schema + separator + element + separator + qualifier; @@ -444,14 +447,14 @@ public static String getBaseUrl(String urlString) { */ public static String getHostName(String uriString) { try { - URI uri = new URI(uriString); - String hostname = uri.getHost(); + URL url = new URL(uriString); + String hostname = url.getHost(); // remove the "www." 
from hostname, if it exists if (hostname != null) { return hostname.startsWith("www.") ? hostname.substring(4) : hostname; } - return hostname; - } catch (URISyntaxException e) { + return null; + } catch (MalformedURLException e) { return null; } } diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java b/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java new file mode 100644 index 000000000000..fa630029b890 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/ctask/general/CitationPage.java @@ -0,0 +1,284 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.ctask.general; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.core.Context; +import org.dspace.curate.AbstractCurationTask; +import org.dspace.curate.Curator; +import org.dspace.curate.Distributive; +import org.dspace.curate.Mutative; +import org.dspace.disseminate.factory.DisseminateServiceFactory; +import org.dspace.disseminate.service.CitationDocumentService; + +/** + * CitationPage + * + * This task is used to 
generate a cover page with citation information for text + documents and then to add that cover page to a PDF version of the document + replacing the originally uploaded document from the user's perspective. + * + * @author Ryan McGowan + */ + +@Distributive +@Mutative +public class CitationPage extends AbstractCurationTask { + /** + * Class Logger + */ + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(CitationPage.class); + + protected int status = Curator.CURATE_UNSET; + protected String result = null; + /** + * A StringBuilder to handle result string building process. + */ + protected StringBuilder resBuilder; + + + /** + * The name to give the bundle we add the cited pages to. + */ + protected static final String DISPLAY_BUNDLE_NAME = "DISPLAY"; + /** + * The name of the bundle to move source documents into after they have been + * cited. + */ + protected static final String PRESERVATION_BUNDLE_NAME = "PRESERVATION"; + + protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); + protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance() + .getResourcePolicyService(); + + private Map displayMap = new HashMap(); + + /** + * {@inheritDoc} + * + * @see CurationTask#perform(DSpaceObject) + */ + @Override + public int perform(DSpaceObject dso) throws IOException { + + // Deal with status and result as well as call distribute. + this.resBuilder = new StringBuilder(); + this.distribute(dso); + this.result = this.resBuilder.toString(); + this.setResult(this.result); + this.report(this.result); + + return this.status; + } + + /** + * {@inheritDoc} + * + * @see AbstractCurationTask#performItem(Item) + */ + @Override + protected void performItem(Item item) throws SQLException { + //Determine if the DISPLAY bundle exists. If not, create it. 
+ List dBundles = itemService.getBundles(item, CitationPage.DISPLAY_BUNDLE_NAME); + Bundle original = itemService.getBundles(item, "ORIGINAL").get(0); + Bundle dBundle = null; + if (dBundles == null || dBundles.isEmpty()) { + try { + dBundle = bundleService.create(Curator.curationContext(), item, CitationPage.DISPLAY_BUNDLE_NAME); + // don't inherit now otherwise they will be copied over the moved bitstreams + resourcePolicyService.removeAllPolicies(Curator.curationContext(), dBundle); + } catch (AuthorizeException e) { + log.error("User not authorized to create bundle on item \"{}\": {}", + item::getName, e::getMessage); + return; + } + } else { + dBundle = dBundles.get(0); + } + + //Create a map of the bitstreams in the displayBundle. This is used to + //check if the bundle being cited is already in the display bundle. + for (Bitstream bs : dBundle.getBitstreams()) { + displayMap.put(bs.getName(), bs); + } + + //Determine if the preservation bundle exists and add it if we need to. + //Also, set up bundles so it contains all ORIGINAL and PRESERVATION + //bitstreams. + List pBundles = itemService.getBundles(item, CitationPage.PRESERVATION_BUNDLE_NAME); + Bundle pBundle = null; + List bundles = new ArrayList<>(); + if (pBundles != null && !pBundles.isEmpty()) { + pBundle = pBundles.get(0); + bundles.addAll(itemService.getBundles(item, "ORIGINAL")); + bundles.addAll(pBundles); + } else { + try { + pBundle = bundleService.create(Curator.curationContext(), item, CitationPage.PRESERVATION_BUNDLE_NAME); + // don't inherit now otherwise they will be copied over the moved bitstreams + resourcePolicyService.removeAllPolicies(Curator.curationContext(), pBundle); + } catch (AuthorizeException e) { + log.error("User not authorized to create bundle on item \"" + + item.getName() + "\": " + e.getMessage()); + } + bundles = itemService.getBundles(item, "ORIGINAL"); + } + + //Start looping through our bundles. Anything that is citable in these + //bundles will be cited. 
+ for (Bundle bundle : bundles) { + List bitstreams = bundle.getBitstreams(); + + // Loop through each file and generate a cover page for documents + // that are PDFs. + for (Bitstream bitstream : bitstreams) { + + //If bitstream is a PDF document then it is citable. + CitationDocumentService citationDocument = DisseminateServiceFactory.getInstance() + .getCitationDocumentService(); + + if (citationDocument.canGenerateCitationVersion(Curator.curationContext(), bitstream)) { + this.resBuilder.append(item.getHandle()) + .append(" - ") + .append(bitstream.getName()) + .append(" is citable."); + try { + //Create the cited document + InputStream citedInputStream = + new ByteArrayInputStream( + citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft()); + //Add the cited document to the appropriate bundle + this.addCitedPageToItem(citedInputStream, bundle, pBundle, + dBundle, item, bitstream); + // now set the policies of the preservation and display bundle + clonePolicies(Curator.curationContext(), original, pBundle); + clonePolicies(Curator.curationContext(), original, dBundle); + } catch (Exception e) { + //Could be many things, but nothing that should be + //expected. + //Print out some detailed information for debugging. + e.printStackTrace(); + StackTraceElement[] stackTrace = e.getStackTrace(); + StringBuilder stack = new StringBuilder(); + int numLines = Math.min(stackTrace.length, 12); + for (int j = 0; j < numLines; j++) { + stack.append("\t") + .append(stackTrace[j].toString()) + .append("\n"); + } + if (stackTrace.length > numLines) { + stack.append("\t. . 
.\n"); + } + + log.error(e.toString() + " -> \n" + stack.toString()); + this.resBuilder.append(", but there was an error generating the PDF.\n"); + this.status = Curator.CURATE_ERROR; + } + } else { + //bitstream is not a document + this.resBuilder.append(item.getHandle()) + .append(" - ") + .append(bitstream.getName()) + .append(" is not citable.\n"); + this.status = Curator.CURATE_SUCCESS; + } + } + } + } + + /** + * A helper function for {@link CitationPage#performItem(Item)}. This function takes in the + * cited document as a File and adds it to DSpace properly. + * + * @param citedDoc The inputstream that is the cited document. + * @param bundle The bundle the cited file is from. + * @param pBundle The preservation bundle. The original document should be + * put in here if it is not already. + * @param dBundle The display bundle. The cited document gets put in here. + * @param item The item containing the bundles being used. + * @param bitstream The original source bitstream. + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + * @throws IOException if IO error + */ + protected void addCitedPageToItem(InputStream citedDoc, Bundle bundle, Bundle pBundle, + Bundle dBundle, Item item, + Bitstream bitstream) throws SQLException, AuthorizeException, IOException { + //If we are modifying a file that is not in the + //preservation bundle then we have to move it there. + Context context = Curator.curationContext(); + if (!bundle.getID().equals(pBundle.getID())) { + bundleService.addBitstream(context, pBundle, bitstream); + bundleService.removeBitstream(context, bundle, bitstream); + List bitstreams = bundle.getBitstreams(); + if (bitstreams == null || bitstreams.isEmpty()) { + itemService.removeBundle(context, item, bundle); + } + } + + //Create an input stream form the temporary file + //that is the cited document and create a + //bitstream from it. 
+ if (displayMap.containsKey(bitstream.getName())) { + bundleService.removeBitstream(context, dBundle, displayMap.get(bitstream.getName())); + } + Bitstream citedBitstream = bitstreamService.create(context, dBundle, citedDoc); + citedDoc.close(); //Close up the temporary InputStream + + //Setup a good name for our bitstream and make + //it the same format as the source document. + citedBitstream.setName(context, bitstream.getName()); + bitstreamService.setFormat(context, citedBitstream, bitstream.getFormat(Curator.curationContext())); + citedBitstream.setDescription(context, bitstream.getDescription()); + displayMap.put(bitstream.getName(), citedBitstream); + clonePolicies(context, bitstream, citedBitstream); + this.resBuilder.append(" Added ") + .append(citedBitstream.getName()) + .append(" to the ") + .append(CitationPage.DISPLAY_BUNDLE_NAME) + .append(" bundle.\n"); + + //Run update to propagate changes to the + //database. + itemService.update(context, item); + this.status = Curator.CURATE_SUCCESS; + } + + private void clonePolicies(Context context, DSpaceObject source,DSpaceObject target) + throws SQLException, AuthorizeException { + resourcePolicyService.removeAllPolicies(context, target); + for (ResourcePolicy rp: source.getResourcePolicies()) { + ResourcePolicy newPolicy = resourcePolicyService.clone(context, rp); + newPolicy.setdSpaceObject(target); + newPolicy.setAction(rp.getAction()); + resourcePolicyService.update(context, newPolicy); + } + + } +} diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java b/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java new file mode 100644 index 000000000000..9639461426ef --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/ctask/general/CreateMissingIdentifiers.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and 
available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.ctask.general; + +import java.io.IOException; +import java.sql.SQLException; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.curate.AbstractCurationTask; +import org.dspace.curate.Curator; +import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.IdentifierProvider; +import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles; +import org.dspace.identifier.factory.IdentifierServiceFactory; +import org.dspace.identifier.service.IdentifierService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Ensure that an object has all of the identifiers that it should, minting them + * as necessary. + * + * @author Mark H. Wood {@literal } + */ +public class CreateMissingIdentifiers + extends AbstractCurationTask { + private static final Logger LOG = LogManager.getLogger(); + + @Override + public int perform(DSpaceObject dso) + throws IOException { + // Only some kinds of model objects get identifiers + if (!(dso instanceof Item)) { + return Curator.CURATE_SKIP; + } + + // XXX Temporary escape when an incompatible provider is configured. + // XXX Remove this when the provider is fixed. 
+ boolean compatible = DSpaceServicesFactory + .getInstance() + .getServiceManager() + .getServiceByName( + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(), + IdentifierProvider.class) == null; + if (!compatible) { + setResult("This task is not compatible with VersionedHandleIdentifierProviderWithCanonicalHandles"); + return Curator.CURATE_ERROR; + } + // XXX End of escape + + String typeText = Constants.typeText[dso.getType()]; + + // Get a Context + Context context; + try { + context = Curator.curationContext(); + } catch (SQLException ex) { + report("Could not get the curation Context: " + ex.getMessage()); + return Curator.CURATE_ERROR; + } + + // Find the IdentifierService implementation + IdentifierService identifierService = IdentifierServiceFactory + .getInstance() + .getIdentifierService(); + + // Register any missing identifiers. + try { + identifierService.register(context, dso); + } catch (AuthorizeException | IdentifierException | SQLException ex) { + String message = ex.getMessage(); + report(String.format("Identifier(s) not minted for %s %s: %s%n", + typeText, dso.getID().toString(), message)); + LOG.error("Identifier(s) not minted: {}", message); + return Curator.CURATE_ERROR; + } + + // Success! 
+ report(String.format("%s %s registered.%n", + typeText, dso.getID().toString())); + return Curator.CURATE_SUCCESS; + } +} diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java index edeb2a6d0224..5891fa017cb0 100644 --- a/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java +++ b/dspace-api/src/main/java/org/dspace/ctask/general/MetadataWebService.java @@ -10,11 +10,13 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Queue; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.xml.XMLConstants; @@ -33,6 +35,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; @@ -60,18 +63,18 @@ * Intended use: cataloging tool in workflow and general curation. * The task uses a URL 'template' to compose the service call, e.g. * - * {@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}} + *

    {@code http://www.sherpa.ac.uk/romeo/api29.php?issn=\{dc.identifier.issn\}} * - * Task will substitute the value of the passed item's metadata field + *

    Task will substitute the value of the passed item's metadata field * in the {parameter} position. If multiple values are present in the * item field, the first value is used. * - * The task uses another property (the datamap) to determine what data + *

    The task uses another property (the datamap) to determine what data * to extract from the service response and how to use it, e.g. * - * {@code //publisher/name=>dc.publisher,//romeocolour} + *

    {@code //publisher/name=>dc.publisher,//romeocolour} * - * Task will evaluate the left-hand side (or entire token) of each + *

    Task will evaluate the left-hand side (or entire token) of each * comma-separated token in the property as an XPath 1.0 expression into * the response document, and if there is a mapping symbol (e.g. {@code '=>'}) and * value, it will assign the response document value(s) to the named @@ -79,48 +82,52 @@ * multiple values, they will all be assigned to the item field. The * mapping symbol governs the nature of metadata field assignment: * - * {@code '->'} mapping will add to any existing values in the item field - * {@code '=>'} mapping will replace any existing values in the item field - * {@code '~>'} mapping will add *only* if item field has no existing values + *

      + *
    • {@code '->'} mapping will add to any existing values in the item field
    • + *
    • {@code '=>'} mapping will replace any existing values in the item field
    • + *
    • {@code '~>'} mapping will add *only* if item field has no existing values
    • + *
    * - * Unmapped data (without a mapping symbol) will simply be added to the task + *

    Unmapped data (without a mapping symbol) will simply be added to the task * result string, prepended by the XPath expression (a little prettified). * Each label/value pair in the result string is separated by a space, * unless the optional 'separator' property is defined. * - * A very rudimentary facility for transformation of data is supported, e.g. + *

    A very rudimentary facility for transformation of data is supported, e.g. * - * {@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref} + *

    {@code http://www.crossref.org/openurl/?id=\{doi:dc.relation.isversionof\}&format=unixref} * - * The 'doi:' prefix will cause the task to look for a 'transform' with that + *

    The 'doi:' prefix will cause the task to look for a 'transform' with that * name, which is applied to the metadata value before parameter substitution * occurs. Transforms are defined in a task property such as the following: * - * {@code transform.doi = match 10. trunc 60} + *

    {@code transform.doi = match 10. trunc 60} * - * This means exclude the value string up to the occurrence of '10.', then + *

    This means exclude the value string up to the occurrence of '10.', then * truncate after 60 characters. The only transform functions currently defined: * - * {@code 'cut' } = remove number leading characters - * {@code 'trunc' } = remove trailing characters after number length - * {@code 'match' } = start match at pattern - * {@code 'text' } = append literal characters (enclose in ' ' when whitespace needed) + *

      + *
    • {@code 'cut' } = remove number leading characters
    • + *
    • {@code 'trunc' } = remove trailing characters after number length
    • + *
    • {@code 'match' } = start match at pattern
    • + *
    • {@code 'text' } = append literal characters (enclose in ' ' when whitespace needed)
    • + *
    * - * If the transform results in an invalid state (e.g. cutting more characters + *

    If the transform results in an invalid state (e.g. cutting more characters * than are in the value), the condition will be logged and the * un-transformed value used. * - * Transforms may also be used in datamaps, e.g. + *

    Transforms may also be used in datamaps, e.g. * - * {@code //publisher/name=>shorten:dc.publisher,//romeocolour} + *

    {@code //publisher/name=>shorten:dc.publisher,//romeocolour} * - * which would apply the 'shorten' transform to the service response value(s) + *

    which would apply the 'shorten' transform to the service response value(s) * prior to metadata field assignment. * - * An optional property 'headers' may be defined to stipulate any HTTP headers + *

    An optional property 'headers' may be defined to stipulate any HTTP headers * required in the service call. The property syntax is double-pipe separated headers: * - * {@code Accept: text/xml||Cache-Control: no-cache} + *

    {@code Accept: text/xml||Cache-Control: no-cache} * * @author richardrodgers */ @@ -128,9 +135,9 @@ @Suspendable public class MetadataWebService extends AbstractCurationTask implements NamespaceContext { /** - * log4j category + * logging category */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataWebService.class); + private static final Logger log = LogManager.getLogger(); // transform token parsing pattern protected Pattern ttPattern = Pattern.compile("\'([^\']*)\'|(\\S+)"); // URL of web service with template parameters @@ -200,9 +207,10 @@ public void init(Curator curator, String taskId) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); try { - // disallow DTD parsing to ensure no XXE attacks can occur. + // disallow DTD parsing to ensure no XXE attacks can occur // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); + factory.setXIncludeAware(false); docBuilder = factory.newDocumentBuilder(); } catch (ParserConfigurationException pcE) { log.error("caught exception: " + pcE); @@ -360,42 +368,45 @@ protected String transform(String value, String transDef) { if (transDef == null) { return value; } - String[] tokens = tokenize(transDef); + Queue tokens = tokenize(transDef); String retValue = value; - for (int i = 0; i < tokens.length; i += 2) { - if ("cut".equals(tokens[i]) || "trunc".equals(tokens[i])) { - int index = Integer.parseInt(tokens[i + 1]); + while (!tokens.isEmpty()) { + String function = tokens.poll(); + if ("cut".equals(function) || "trunc".equals(function)) { + String argument = tokens.poll(); + int index = Integer.parseInt(argument); if (retValue.length() > index) { - if ("cut".equals(tokens[i])) { + if ("cut".equals(function)) { retValue = retValue.substring(index); } else { retValue = 
retValue.substring(0, index); } - } else if ("cut".equals(tokens[i])) { - log.error("requested cut: " + index + " exceeds value length"); + } else if ("cut".equals(function)) { + log.error("requested cut: {} exceeds value length", index); return value; } - } else if ("match".equals(tokens[i])) { - int index2 = retValue.indexOf(tokens[i + 1]); + } else if ("match".equals(function)) { + String argument = tokens.poll(); + int index2 = retValue.indexOf(argument); if (index2 > 0) { retValue = retValue.substring(index2); } else { - log.error("requested match: " + tokens[i + 1] + " failed"); + log.error("requested match: {} failed", argument); return value; } - } else if ("text".equals(tokens[i])) { - retValue = retValue + tokens[i + 1]; + } else if ("text".equals(function)) { + retValue = retValue + tokens.poll(); } else { - log.error(" unknown transform operation: " + tokens[i]); + log.error(" unknown transform operation: " + function); return value; } } return retValue; } - protected String[] tokenize(String text) { - List list = new ArrayList<>(); + protected Queue tokenize(String text) { Matcher m = ttPattern.matcher(text); + Queue list = new ArrayDeque<>(m.groupCount()); while (m.find()) { if (m.group(1) != null) { list.add(m.group(1)); @@ -403,7 +414,7 @@ protected String[] tokenize(String text) { list.add(m.group(2)); } } - return list.toArray(new String[0]); + return list; } protected int getMapIndex(String mapping) { diff --git a/dspace-api/src/main/java/org/dspace/ctask/general/RegisterDOI.java b/dspace-api/src/main/java/org/dspace/ctask/general/RegisterDOI.java new file mode 100644 index 000000000000..0765d7b000d1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/ctask/general/RegisterDOI.java @@ -0,0 +1,156 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.ctask.general; + +import java.io.IOException; +import java.sql.SQLException; + +import org.apache.logging.log4j.Logger; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.logic.Filter; +import org.dspace.content.logic.FilterUtils; +import org.dspace.content.logic.TrueFilter; +import org.dspace.curate.AbstractCurationTask; +import org.dspace.curate.Curator; +import org.dspace.identifier.DOIIdentifierProvider; +import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.doi.DOIIdentifierNotApplicableException; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +/** + * This curation task will register a DOI for an item, optionally ignoring any logical filtering applied + * to normal identifier registration and DOI service operation. + * + * @author Kim Shepherd + */ +public class RegisterDOI extends AbstractCurationTask { + // Curation task status + private int status = Curator.CURATE_SUCCESS; + // The skipFilter boolean has a default value of 'true', as per intended operation + private boolean skipFilter = true; + // The distributed boolean has a default value of 'false' for safest operation + private boolean distributed = false; + // Prefix for configuration module + private static final String PLUGIN_PREFIX = "doi-curation"; + // Logger + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RegisterDOI.class); + // DOI provider + private DOIIdentifierProvider provider; + private Filter trueFilter; + + /** + * Initialise the curation task and read configuration, instantiate the DOI provider + */ + @Override + public void init(Curator curator, String taskId) throws IOException { + super.init(curator, taskId); + // Get distribution behaviour from configuration, with a default value of 'false' + distributed = configurationService.getBooleanProperty(PLUGIN_PREFIX + ".distributed", false); + log.debug("PLUGIN_PREFIX = " + 
PLUGIN_PREFIX + ", skipFilter = " + skipFilter + + ", distributed = " + distributed); + // Instantiate DOI provider singleton + provider = new DSpace().getSingletonService(DOIIdentifierProvider.class); + trueFilter = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName( + "always_true_filter", TrueFilter.class); + } + + /** + * Override the abstract 'perform' method to either distribute, or perform single-item + * depending on configuration. By default, the task is *not* distributed, since that could be unsafe + * and the original purpose of this task is to essentially implement a "Register DOI" button on the Edit Item page. + * @param dso DSpaceObject for which to register a DOI (must be item) + * @return status indicator + * @throws IOException + */ + @Override + public int perform(DSpaceObject dso) throws IOException { + // Check distribution configuration + if (distributed) { + // This task is configured for distributed use. Call distribute() and let performItem handle + // the main processing. + distribute(dso); + } else { + // This task is NOT configured for distributed use (default). Instead process a single item directly + if (dso instanceof Item) { + Item item = (Item) dso; + performRegistration(item); + } else { + log.warn("DOI registration attempted on non-item DSpace Object: " + dso.getID()); + } + return status; + } + return status; + } + + /** + * This is called when the task is distributed (ie. 
called on a set of items or over a whole structure) + * @param item the DSpace Item + */ + @Override + protected void performItem(Item item) { + performRegistration(item); + } + + /** + * Shared 'perform' code between perform() and performItem() - a curation wrapper for the register() method + * @param item the item for which to register a DOI + */ + private void performRegistration(Item item) { + // Request DOI registration and report results + String doi = register(item); + String result = "DOI registration task performed on " + item.getHandle() + "."; + if (doi != null) { + result += " DOI: (" + doi + ")"; + } else { + result += " DOI was null, either item was filtered or an error was encountered."; + } + setResult(result); + report(result); + } + + /** + * Perform the DOIIdentifierProvider.register call, with skipFilter passed as per config and defaults + * @param item The item for which to register a DOI + */ + private String register(Item item) { + String doi = null; + // Attempt DOI registration and report successes and failures + try { + Filter filter = FilterUtils.getFilterFromConfiguration("identifiers.submission.filter.curation", + trueFilter); + doi = provider.register(Curator.curationContext(), item, filter); + if (doi != null) { + String message = "New DOI minted in database for item " + item.getHandle() + ": " + doi + + ". This DOI will be registered online with the DOI provider when the queue is next run"; + report(message); + } else { + log.error("Got a null DOI after registering..."); + } + } catch (SQLException e) { + // Exception obtaining context + log.error("Error obtaining curator context: " + e.getMessage()); + status = Curator.CURATE_ERROR; + } catch (DOIIdentifierNotApplicableException e) { + // Filter returned 'false' so DOI was not registered. This is normal behaviour when filter is running. 
+ log.info("Item was filtered from DOI registration: " + e.getMessage()); + String message = "Item " + item.getHandle() + " was skipped from DOI registration because it matched " + + "the item filter configured in identifier-services.xml."; + report(message); + status = Curator.CURATE_SUCCESS; + } catch (IdentifierException e) { + // Any other identifier exception is probably a true error + log.error("Error registering identifier: " + e.getMessage()); + status = Curator.CURATE_ERROR; + } + + return doi; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/curate/AbstractCurationTask.java b/dspace-api/src/main/java/org/dspace/curate/AbstractCurationTask.java index d5ec37d60b98..fa16d2736953 100644 --- a/dspace-api/src/main/java/org/dspace/curate/AbstractCurationTask.java +++ b/dspace-api/src/main/java/org/dspace/curate/AbstractCurationTask.java @@ -13,7 +13,6 @@ import java.util.List; import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; @@ -39,8 +38,6 @@ public abstract class AbstractCurationTask implements CurationTask { protected Curator curator = null; // curator-assigned taskId protected String taskId = null; - // logger - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AbstractCurationTask.class); protected CommunityService communityService; protected ItemService itemService; protected HandleService handleService; diff --git a/dspace-api/src/main/java/org/dspace/curate/CitationPage.java b/dspace-api/src/main/java/org/dspace/curate/CitationPage.java deleted file mode 100644 index 386bf0ba9272..000000000000 --- a/dspace-api/src/main/java/org/dspace/curate/CitationPage.java +++ /dev/null @@ -1,247 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - 
* http://www.dspace.org/license/ - */ -package org.dspace.curate; - -import java.io.IOException; -import java.io.InputStream; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.lang3.tuple.Pair; -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import org.dspace.content.Bitstream; -import org.dspace.content.BitstreamFormat; -import org.dspace.content.Bundle; -import org.dspace.content.DSpaceObject; -import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.BitstreamService; -import org.dspace.content.service.BundleService; -import org.dspace.core.Context; -import org.dspace.disseminate.factory.DisseminateServiceFactory; -import org.dspace.disseminate.service.CitationDocumentService; - -/** - * CitationPage - * - * This task is used to generate a cover page with citation information for text - * documents and then to add that cover page to a PDF version of the document - * replacing the originally uploaded document form the user's perspective. - * - * @author Ryan McGowan - */ - -@Distributive -@Mutative -public class CitationPage extends AbstractCurationTask { - /** - * Class Logger - */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(CitationPage.class); - - protected int status = Curator.CURATE_UNSET; - protected String result = null; - /** - * A StringBuilder to handle result string building process. - */ - protected StringBuilder resBuilder; - - - /** - * The name to give the bundle we add the cited pages to. - */ - protected static final String DISPLAY_BUNDLE_NAME = "DISPLAY"; - /** - * The name of the bundle to move source documents into after they have been - * cited. 
- */ - protected static final String PRESERVATION_BUNDLE_NAME = "PRESERVATION"; - - protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); - protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); - - /** - * {@inheritDoc} - * - * @see CurationTask#perform(DSpaceObject) - */ - @Override - public int perform(DSpaceObject dso) throws IOException { - - // Deal with status and result as well as call distribute. - this.resBuilder = new StringBuilder(); - this.distribute(dso); - this.result = this.resBuilder.toString(); - this.setResult(this.result); - this.report(this.result); - - return this.status; - } - - /** - * {@inheritDoc} - * - * @see AbstractCurationTask#performItem(Item) - */ - @Override - protected void performItem(Item item) throws SQLException { - //Determine if the DISPLAY bundle exits. If not, create it. - List dBundles = itemService.getBundles(item, CitationPage.DISPLAY_BUNDLE_NAME); - Bundle dBundle = null; - if (dBundles == null || dBundles.size() == 0) { - try { - dBundle = bundleService.create(Curator.curationContext(), item, CitationPage.DISPLAY_BUNDLE_NAME); - } catch (AuthorizeException e) { - log.error("User not authroized to create bundle on item \"" - + item.getName() + "\": " + e.getMessage()); - } - } else { - dBundle = dBundles.get(0); - } - - //Create a map of the bitstreams in the displayBundle. This is used to - //check if the bundle being cited is already in the display bundle. - Map displayMap = new HashMap(); - for (Bitstream bs : dBundle.getBitstreams()) { - displayMap.put(bs.getName(), bs); - } - - //Determine if the preservation bundle exists and add it if we need to. - //Also, set up bundles so it contains all ORIGINAL and PRESERVATION - //bitstreams. 
- List pBundles = itemService.getBundles(item, CitationPage.PRESERVATION_BUNDLE_NAME); - Bundle pBundle = null; - List bundles = new ArrayList<>(); - if (pBundles != null && pBundles.size() > 0) { - pBundle = pBundles.get(0); - bundles.addAll(itemService.getBundles(item, "ORIGINAL")); - bundles.addAll(pBundles); - } else { - try { - pBundle = bundleService.create(Curator.curationContext(), item, CitationPage.PRESERVATION_BUNDLE_NAME); - } catch (AuthorizeException e) { - log.error("User not authroized to create bundle on item \"" - + item.getName() + "\": " + e.getMessage()); - } - bundles = itemService.getBundles(item, "ORIGINAL"); - } - - //Start looping through our bundles. Anything that is citable in these - //bundles will be cited. - for (Bundle bundle : bundles) { - List bitstreams = bundle.getBitstreams(); - - // Loop through each file and generate a cover page for documents - // that are PDFs. - for (Bitstream bitstream : bitstreams) { - BitstreamFormat format = bitstream.getFormat(Curator.curationContext()); - - //If bitstream is a PDF document then it is citable. - CitationDocumentService citationDocument = DisseminateServiceFactory.getInstance() - .getCitationDocumentService(); - - if (citationDocument.canGenerateCitationVersion(Curator.curationContext(), bitstream)) { - this.resBuilder.append(item.getHandle() + " - " - + bitstream.getName() + " is citable."); - try { - //Create the cited document - Pair citedDocument = - citationDocument.makeCitedDocument(Curator.curationContext(), bitstream); - //Add the cited document to the approiate bundle - this.addCitedPageToItem(citedDocument.getLeft(), bundle, pBundle, - dBundle, displayMap, item, bitstream); - } catch (Exception e) { - //Could be many things, but nothing that should be - //expected. - //Print out some detailed information for debugging. 
- e.printStackTrace(); - StackTraceElement[] stackTrace = e.getStackTrace(); - StringBuilder stack = new StringBuilder(); - int numLines = Math.min(stackTrace.length, 12); - for (int j = 0; j < numLines; j++) { - stack.append("\t" + stackTrace[j].toString() + "\n"); - } - if (stackTrace.length > numLines) { - stack.append("\t. . .\n"); - } - - log.error(e.toString() + " -> \n" + stack.toString()); - this.resBuilder.append(", but there was an error generating the PDF.\n"); - this.status = Curator.CURATE_ERROR; - } - } else { - //bitstream is not a document - this.resBuilder.append(item.getHandle() + " - " - + bitstream.getName() + " is not citable.\n"); - this.status = Curator.CURATE_SUCCESS; - } - } - } - } - - /** - * A helper function for {@link CitationPage#performItem(Item)}. This function takes in the - * cited document as a File and adds it to DSpace properly. - * - * @param citedDoc The inputstream that is the cited document. - * @param bundle The bundle the cited file is from. - * @param pBundle The preservation bundle. The original document should be - * put in here if it is not already. - * @param dBundle The display bundle. The cited document gets put in here. - * @param displayMap The map of bitstream names to bitstreams in the display - * bundle. - * @param item The item containing the bundles being used. - * @param bitstream The original source bitstream. - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - * @throws IOException if IO error - */ - protected void addCitedPageToItem(InputStream citedDoc, Bundle bundle, Bundle pBundle, - Bundle dBundle, Map displayMap, Item item, - Bitstream bitstream) throws SQLException, AuthorizeException, IOException { - //If we are modifying a file that is not in the - //preservation bundle then we have to move it there. 
- Context context = Curator.curationContext(); - if (bundle.getID() != pBundle.getID()) { - bundleService.addBitstream(context, pBundle, bitstream); - bundleService.removeBitstream(context, bundle, bitstream); - List bitstreams = bundle.getBitstreams(); - if (bitstreams == null || bitstreams.size() == 0) { - itemService.removeBundle(context, item, bundle); - } - } - - //Create an input stream form the temporary file - //that is the cited document and create a - //bitstream from it. - if (displayMap.containsKey(bitstream.getName())) { - bundleService.removeBitstream(context, dBundle, displayMap.get(bitstream.getName())); - } - Bitstream citedBitstream = bitstreamService.create(context, dBundle, citedDoc); - citedDoc.close(); //Close up the temporary InputStream - - //Setup a good name for our bitstream and make - //it the same format as the source document. - citedBitstream.setName(context, bitstream.getName()); - bitstreamService.setFormat(context, citedBitstream, bitstream.getFormat(Curator.curationContext())); - citedBitstream.setDescription(context, bitstream.getDescription()); - - this.resBuilder.append(" Added " - + citedBitstream.getName() - + " to the " + CitationPage.DISPLAY_BUNDLE_NAME + " bundle.\n"); - - //Run update to propagate changes to the - //database. - itemService.update(context, item); - this.status = Curator.CURATE_SUCCESS; - } -} diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java index a01c731189bf..4d70286e79e0 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curation.java +++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java @@ -152,17 +152,10 @@ private long runQueue(TaskQueue queue, Curator curator) throws SQLException, Aut super.handler.logInfo("Curating id: " + entry.getObjectId()); } curator.clear(); - // does entry relate to a DSO or workflow object? 
- if (entry.getObjectId().indexOf('/') > 0) { - for (String taskName : entry.getTaskNames()) { - curator.addTask(taskName); - } - curator.curate(context, entry.getObjectId()); - } else { - // TODO: Remove this exception once curation tasks are supported by configurable workflow - // e.g. see https://github.com/DSpace/DSpace/pull/3157 - throw new IllegalArgumentException("curation for workflow items is no longer supported"); + for (String taskName : entry.getTaskNames()) { + curator.addTask(taskName); } + curator.curate(context, entry.getObjectId()); } queue.release(this.queue, ticket, true); return ticket; @@ -189,7 +182,7 @@ private void endScript(long timeRun) throws SQLException { * @throws FileNotFoundException If file of command line variable -r reporter is not found */ private Curator initCurator() throws FileNotFoundException { - Curator curator = new Curator(); + Curator curator = new Curator(handler); OutputStream reporterStream; if (null == this.reporter) { reporterStream = new NullOutputStream(); @@ -259,12 +252,19 @@ protected void assignCurrentUserInContext() throws ParseException { super.handler.logError("EPerson not found: " + currentUserUuid); throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); } + assignSpecialGroupsInContext(); this.context.setCurrentUser(eperson); } catch (SQLException e) { handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); } } + protected void assignSpecialGroupsInContext() throws SQLException { + for (UUID uuid : handler.getSpecialGroups()) { + context.setSpecialGroup(uuid); + } + } + /** * Fills in some optional command line options. * Checks if there are missing required options or invalid values for options. 
diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java index 5e1d014873e9..eaa04f477829 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java @@ -19,7 +19,6 @@ public class CurationCliScriptConfiguration extends CurationScriptConfiguration< public Options getOptions() { options = super.getOptions(); options.addOption("e", "eperson", true, "email address of curating eperson"); - options.getOption("e").setType(String.class); options.getOption("e").setRequired(true); return options; } diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768ea..2587e6b0251e 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,16 +38,37 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) 
{ } /** - * Only admin can run Curation script via the scripts and processes endpoints. - * @param context The relevant DSpace context - * @return True if currentUser is admin, otherwise false + * Only repository admins or admins of the target object can run Curation script via the scripts + * and processes endpoints. + * + * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise + * @return true if the currentUser is allowed to run the script with the specified parameters or + * at least in some case if the parameters are not yet known */ @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { try { - return authorizeService.isAdmin(context); + if (commandLineParameters == null) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-i"::equals)) { + return authorizeService.isAdmin(context); + } else { + String dspaceObjectID = commandLineParameters.stream() + .filter(parameter -> "-i".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .findFirst() + .get(); + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID); + return authorizeService.isAdmin(context, dso); + } } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + throw new RuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/curate/Curator.java b/dspace-api/src/main/java/org/dspace/curate/Curator.java index aa6cb14fda7e..4076fab51989 100644 --- a/dspace-api/src/main/java/org/dspace/curate/Curator.java +++ 
b/dspace-api/src/main/java/org/dspace/curate/Curator.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.sql.SQLException; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -30,6 +31,7 @@ import org.dspace.core.factory.CoreServiceFactory; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.handle.service.HandleService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; /** * Curator orchestrates and manages the application of a one or more curation @@ -90,6 +92,17 @@ public static enum TxScope { protected CommunityService communityService; protected ItemService itemService; protected HandleService handleService; + protected DSpaceRunnableHandler handler; + + /** + * constructor that uses an handler for logging + * + * @param handler {@code DSpaceRunnableHandler} used to logs infos + */ + public Curator(DSpaceRunnableHandler handler) { + this(); + this.handler = handler; + } /** * No-arg constructor @@ -338,7 +351,7 @@ public void clear() { */ public void report(String message) { if (null == reporter) { - log.warn("report called with no Reporter set: {}", message); + logWarning("report called with no Reporter set: {}", message); return; } @@ -435,7 +448,7 @@ protected boolean doSite(TaskRunner tr, Site site) throws IOException { // Site-wide Tasks really should have an EPerson performer associated with them, // otherwise they are run as an "anonymous" user with limited access rights. if (ctx.getCurrentUser() == null && !ctx.ignoreAuthorization()) { - log.warn("You are running one or more Site-Wide curation tasks in ANONYMOUS USER mode," + + logWarning("You are running one or more Site-Wide curation tasks in ANONYMOUS USER mode," + " as there is no EPerson 'performer' associated with this task. 
To associate an EPerson " + "'performer' " + " you should ensure tasks are called via the Curator.curate(Context, ID) method."); @@ -546,7 +559,7 @@ public boolean run(DSpaceObject dso) throws IOException { } statusCode = task.perform(dso); String id = (dso.getHandle() != null) ? dso.getHandle() : "workflow item: " + dso.getID(); - log.info(logMessage(id)); + logInfo(logMessage(id)); visit(dso); return !suspend(statusCode); } catch (IOException ioe) { @@ -562,7 +575,7 @@ public boolean run(Context c, String id) throws IOException { throw new IOException("Context or identifier is null"); } statusCode = task.perform(c, id); - log.info(logMessage(id)); + logInfo(logMessage(id)); visit(null); return !suspend(statusCode); } catch (IOException ioe) { @@ -604,5 +617,51 @@ protected String logMessage(String id) { } return mb.toString(); } + + /** + * Proxy method for logging with INFO level + * + * @param message that needs to be logged + */ + protected void logInfo(String message) { + if (handler == null) { + log.info(message); + } else { + handler.logInfo(message); + } + } + + } + + /** + * Proxt method for logging with WARN level + * + * @param message + */ + protected void logWarning(String message) { + logWarning(message, null); + } + + /** + * Proxy method for logging with WARN level and a {@code Messageformatter} + * that generates the final log. 
+ * + * @param message Target message to format or print + * @param object Object to use inside the message, or null + */ + protected void logWarning(String message, Object object) { + if (handler == null) { + if (object != null) { + log.warn(message, object); + } else { + log.warn(message); + } + } else { + if (object != null) { + handler.logWarning(MessageFormat.format(message, object)); + } else { + handler.logWarning(message); + } + } } } diff --git a/dspace-api/src/main/java/org/dspace/curate/FileTaskQueue.java b/dspace-api/src/main/java/org/dspace/curate/FileTaskQueue.java index 979ade99a57b..f603fa2e9a6b 100644 --- a/dspace-api/src/main/java/org/dspace/curate/FileTaskQueue.java +++ b/dspace-api/src/main/java/org/dspace/curate/FileTaskQueue.java @@ -13,16 +13,17 @@ import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.services.factory.DSpaceServicesFactory; - /** * FileTaskQueue provides a TaskQueue implementation based on flat files * for the queues and semaphores. 
@@ -30,14 +31,16 @@ * @author richardrodgers */ public class FileTaskQueue implements TaskQueue { - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(TaskQueue.class); + private static final Logger log = LogManager.getLogger(TaskQueue.class); + // base directory for curation task queues protected String tqDir; // ticket for queue readers protected long readTicket = -1L; + // list of queues owned by reader - protected List readList = new ArrayList(); + protected List readList = new ArrayList<>(); public FileTaskQueue() { tqDir = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("curate.taskqueue.dir"); @@ -72,7 +75,7 @@ public synchronized void enqueue(String queueName, Set entrySet) BufferedWriter writer = null; try { File queue = new File(qDir, "queue" + Integer.toString(queueIdx)); - writer = new BufferedWriter(new FileWriter(queue, true)); + writer = new BufferedWriter(new FileWriter(queue, StandardCharsets.UTF_8, true)); Iterator iter = entrySet.iterator(); while (iter.hasNext()) { writer.write(iter.next().toString()); @@ -96,7 +99,7 @@ public synchronized void enqueue(String queueName, Set entrySet) @Override public synchronized Set dequeue(String queueName, long ticket) throws IOException { - Set entrySet = new HashSet(); + Set entrySet = new HashSet<>(); if (readTicket == -1L) { // hold the ticket & copy all Ids available, locking queues // stop when no more queues or one found locked @@ -113,8 +116,8 @@ public synchronized Set dequeue(String queueName, long ticket) // read contents from file BufferedReader reader = null; try { - reader = new BufferedReader(new FileReader(queue)); - String entryStr = null; + reader = new BufferedReader(new FileReader(queue, StandardCharsets.UTF_8)); + String entryStr; while ((entryStr = reader.readLine()) != null) { entryStr = entryStr.trim(); if (entryStr.length() > 0) { diff --git a/dspace-api/src/main/java/org/dspace/curate/ResolvedTask.java 
b/dspace-api/src/main/java/org/dspace/curate/ResolvedTask.java index 89e92609f0aa..0b05ab3e0f12 100644 --- a/dspace-api/src/main/java/org/dspace/curate/ResolvedTask.java +++ b/dspace-api/src/main/java/org/dspace/curate/ResolvedTask.java @@ -24,7 +24,7 @@ public class ResolvedTask { private CurationTask cTask; private ScriptedTask sTask; // local name of task - private String taskName; + private final String taskName; // annotation data private boolean distributive = false; private boolean mutative = false; @@ -76,7 +76,7 @@ public void init(Curator curator) throws IOException { * @throws IOException if error */ public int perform(DSpaceObject dso) throws IOException { - return (unscripted()) ? cTask.perform(dso) : sTask.performDso(dso); + return unscripted() ? cTask.perform(dso) : sTask.performDso(dso); } /** @@ -88,7 +88,7 @@ public int perform(DSpaceObject dso) throws IOException { * @throws IOException if error */ public int perform(Context ctx, String id) throws IOException { - return (unscripted()) ? cTask.perform(ctx, id) : sTask.performId(ctx, id); + return unscripted() ? 
cTask.perform(ctx, id) : sTask.performId(ctx, id); } /** diff --git a/dspace-api/src/main/java/org/dspace/curate/TaskResolver.java b/dspace-api/src/main/java/org/dspace/curate/TaskResolver.java index a9a8e8906a95..2b12745d8f29 100644 --- a/dspace-api/src/main/java/org/dspace/curate/TaskResolver.java +++ b/dspace-api/src/main/java/org/dspace/curate/TaskResolver.java @@ -15,11 +15,13 @@ import java.io.IOException; import java.io.Reader; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.util.Properties; import javax.script.ScriptEngine; import javax.script.ScriptEngineManager; import javax.script.ScriptException; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.core.factory.CoreServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; @@ -64,7 +66,7 @@ public class TaskResolver { // logging service - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(TaskResolver.class); + private static final Logger log = LogManager.getLogger(TaskResolver.class); // base directory of task scripts & catalog name protected static final String CATALOG = "task.catalog"; @@ -94,7 +96,7 @@ public boolean installScript(String taskName, String fileName) { if (script.exists()) { BufferedReader reader = null; try { - reader = new BufferedReader(new FileReader(script)); + reader = new BufferedReader(new FileReader(script, StandardCharsets.UTF_8)); String line = null; while ((line = reader.readLine()) != null) { if (line.startsWith("#") && line.indexOf("$td=") > 0) { @@ -136,7 +138,7 @@ public void addDescriptor(String taskName, String descriptor) { catalog.put(taskName, descriptor); Writer writer = null; try { - writer = new FileWriter(new File(scriptDir, CATALOG)); + writer = new FileWriter(new File(scriptDir, CATALOG), StandardCharsets.UTF_8); catalog.store(writer, "do not edit"); } catch (IOException ioE) { log.error("Error saving scripted task catalog: " + CATALOG); @@ 
-179,7 +181,7 @@ public ResolvedTask resolveTask(String taskName) { File script = new File(scriptDir, tokens[1]); if (script.exists()) { try { - Reader reader = new FileReader(script); + Reader reader = new FileReader(script, StandardCharsets.UTF_8); engine.eval(reader); reader.close(); // third token is the constructor expression for the class @@ -212,7 +214,7 @@ protected void loadCatalog() { File catalogFile = new File(scriptDir, CATALOG); if (catalogFile.exists()) { try { - Reader reader = new FileReader(catalogFile); + Reader reader = new FileReader(catalogFile, StandardCharsets.UTF_8); catalog.load(reader); reader.close(); } catch (IOException ioE) { diff --git a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java index 4eb7ec5348a6..00e91ee1fb40 100644 --- a/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/curate/XmlWorkflowCuratorServiceImpl.java @@ -13,13 +13,15 @@ import java.util.ArrayList; import java.util.List; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.Item; import org.dspace.content.service.CollectionService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.curate.service.XmlWorkflowCuratorService; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; @@ -30,6 +32,7 @@ import org.dspace.workflow.FlowStep; import org.dspace.workflow.Task; import org.dspace.workflow.TaskSet; +import org.dspace.xmlworkflow.Role; import org.dspace.xmlworkflow.RoleMembers; import org.dspace.xmlworkflow.WorkflowConfigurationException; import org.dspace.xmlworkflow.factory.XmlWorkflowFactory; @@ -47,14 +50,17 @@ * 
Manage interactions between curation and workflow. A curation task can be * attached to a workflow step, to be executed during the step. * + *

    + * NOTE: when run in workflow, curation tasks run with + * authorization disabled. + * * @see CurationTaskConfig * @author mwood */ @Service public class XmlWorkflowCuratorServiceImpl implements XmlWorkflowCuratorService { - private static final Logger LOG - = org.apache.logging.log4j.LogManager.getLogger(); + private static final Logger LOG = LogManager.getLogger(); @Autowired(required = true) protected XmlWorkflowFactory workflowFactory; @@ -97,7 +103,18 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) throws AuthorizeException, IOException, SQLException { Curator curator = new Curator(); curator.setReporter(reporter); - return curate(curator, c, wfi); + c.turnOffAuthorisationSystem(); + boolean wasAnonymous = false; + if (null == c.getCurrentUser()) { // We need someone to email + wasAnonymous = true; + c.setCurrentUser(ePersonService.getSystemEPerson(c)); + } + boolean failedP = curate(curator, c, wfi); + if (wasAnonymous) { + c.setCurrentUser(null); + } + c.restoreAuthSystemState(); + return failedP; } @Override @@ -107,7 +124,7 @@ public boolean curate(Curator curator, Context c, String wfId) if (wfi != null) { return curate(curator, c, wfi); } else { - LOG.warn(LogManager.getHeader(c, "No workflow item found for id: {}", null), wfId); + LOG.warn(LogHelper.getHeader(c, "No workflow item found for id: {}", null), wfId); } return false; } @@ -123,40 +140,47 @@ public boolean curate(Curator curator, Context c, XmlWorkflowItem wfi) item.setOwningCollection(wfi.getCollection()); for (Task task : step.tasks) { curator.addTask(task.name); - curator.curate(item); - int status = curator.getStatus(task.name); - String result = curator.getResult(task.name); - String action = "none"; - switch (status) { - case Curator.CURATE_FAIL: - // task failed - notify any contacts the task has assigned - if (task.powers.contains("reject")) { - action = "reject"; - } - notifyContacts(c, wfi, task, "fail", action, result); - // if task so empowered, reject submission 
and terminate - if ("reject".equals(action)) { - workflowService.sendWorkflowItemBackSubmission(c, wfi, - c.getCurrentUser(), null, - task.name + ": " + result); - return false; - } - break; - case Curator.CURATE_SUCCESS: - if (task.powers.contains("approve")) { - action = "approve"; - } - notifyContacts(c, wfi, task, "success", action, result); - if ("approve".equals(action)) { - // cease further task processing and advance submission - return true; - } - break; - case Curator.CURATE_ERROR: - notifyContacts(c, wfi, task, "error", action, result); - break; - default: - break; + // Check whether the task is configured to be queued rather than automatically run + if (StringUtils.isNotEmpty(step.queue)) { + // queue attribute has been set in the FlowStep configuration: add task to configured queue + curator.queue(c, item.getID().toString(), step.queue); + } else { + // Task is configured to be run automatically + curator.curate(c, item); + int status = curator.getStatus(task.name); + String result = curator.getResult(task.name); + String action = "none"; + switch (status) { + case Curator.CURATE_FAIL: + // task failed - notify any contacts the task has assigned + if (task.powers.contains("reject")) { + action = "reject"; + } + notifyContacts(c, wfi, task, "fail", action, result); + // if task so empowered, reject submission and terminate + if ("reject".equals(action)) { + workflowService.sendWorkflowItemBackSubmission(c, wfi, + c.getCurrentUser(), null, + task.name + ": " + result); + return false; + } + break; + case Curator.CURATE_SUCCESS: + if (task.powers.contains("approve")) { + action = "approve"; + } + notifyContacts(c, wfi, task, "success", action, result); + if ("approve".equals(action)) { + // cease further task processing and advance submission + return true; + } + break; + case Curator.CURATE_ERROR: + notifyContacts(c, wfi, task, "error", action, result); + break; + default: + break; + } } curator.clear(); } @@ -223,8 +247,12 @@ protected void 
notifyContacts(Context c, XmlWorkflowItem wfi, String status, String action, String message) throws AuthorizeException, IOException, SQLException { List epa = resolveContacts(c, task.getContacts(status), wfi); - if (epa.size() > 0) { + if (!epa.isEmpty()) { workflowService.notifyOfCuration(c, wfi, epa, task.name, action, message); + } else { + LOG.warn("No contacts were found for workflow item {}: " + + "task {} returned action {} with message {}", + wfi.getID(), task.name, action, message); } } @@ -247,8 +275,7 @@ protected List resolveContacts(Context c, List contacts, // decode contacts if ("$flowgroup".equals(contact)) { // special literal for current flowgoup - ClaimedTask claimedTask = claimedTaskService.findByWorkflowIdAndEPerson(c, wfi, c.getCurrentUser()); - String stepID = claimedTask.getStepID(); + String stepID = getFlowStep(c, wfi).step; Step step; try { Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); @@ -258,19 +285,26 @@ protected List resolveContacts(Context c, List contacts, String.valueOf(wfi.getID()), e); return epList; } - RoleMembers roleMembers = step.getRole().getMembers(c, wfi); - for (EPerson ep : roleMembers.getEPersons()) { - epList.add(ep); - } - for (Group group : roleMembers.getGroups()) { - epList.addAll(group.getMembers()); + Role role = step.getRole(); + if (null != role) { + RoleMembers roleMembers = role.getMembers(c, wfi); + for (EPerson ep : roleMembers.getEPersons()) { + epList.add(ep); + } + for (Group group : roleMembers.getGroups()) { + epList.addAll(group.getMembers()); + } + } else { + epList.add(ePersonService.getSystemEPerson(c)); } } else if ("$colladmin".equals(contact)) { + // special literal for collection administrators Group adGroup = wfi.getCollection().getAdministrators(); if (adGroup != null) { epList.addAll(groupService.allMembers(c, adGroup)); } } else if ("$siteadmin".equals(contact)) { + // special literal for site administrator EPerson siteEp = ePersonService.findByEmail(c, 
configurationService.getProperty("mail.admin")); if (siteEp != null) { diff --git a/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java b/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java index 2ad1eac12904..778b779cfe03 100644 --- a/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java +++ b/dspace-api/src/main/java/org/dspace/curate/service/XmlWorkflowCuratorService.java @@ -42,9 +42,9 @@ public boolean needsCuration(Context c, XmlWorkflowItem wfi) * * @param c the context * @param wfi the workflow item - * @return true if curation was completed or not required, + * @return true if curation was completed or not required; * false if tasks were queued for later completion, - * or item was rejected + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error @@ -58,7 +58,9 @@ public boolean doCuration(Context c, XmlWorkflowItem wfi) * @param curator the curation context * @param c the user context * @param wfId the workflow item's ID - * @return true if curation failed. + * @return true if curation curation was completed or not required; + * false if tasks were queued for later completion, + * or item was rejected. * @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error @@ -72,7 +74,9 @@ public boolean curate(Curator curator, Context c, String wfId) * @param curator the curation context * @param c the user context * @param wfi the workflow item - * @return true if curation failed. + * @return true if workflow curation was completed or not required; + * false if tasks were queued for later completion, + * or item was rejected. 
* @throws AuthorizeException if authorization error * @throws IOException if IO error * @throws SQLException if database error diff --git a/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java b/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java index d82779015f66..e133ad0ed170 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java +++ b/dspace-api/src/main/java/org/dspace/discovery/DiscoverQuery.java @@ -15,7 +15,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -23,7 +22,7 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; /** - * This class represents a query which the discovery backend can use + * This class represents a query which the discovery back-end can use. * * @author Kevin Van de Velde (kevin at atmire dot com) */ @@ -33,9 +32,9 @@ public class DiscoverQuery { * Main attributes for the discovery query **/ private String query; - private List filterQueries; + private final List filterQueries; private List dspaceObjectFilters = new ArrayList<>(); - private List fieldPresentQueries; + private final List fieldPresentQueries; private boolean spellCheck; private int start = 0; @@ -55,36 +54,35 @@ public enum SORT_ORDER { /** * Attributes required for the faceting of values **/ - private List facetFields; - private List facetQueries; - private int facetLimit = -1; + private final List facetFields; + private final List facetQueries; private int facetMinCount = -1; private int facetOffset = 0; - private Map hitHighlighting; + private final Map hitHighlighting; /** * Used when you want to search for a specific field value **/ - private List searchFields; + private final List searchFields; /** * Misc attributes can be implementation dependent **/ - private Map> properties; + private final Map> properties; private String discoveryConfigurationName; public DiscoverQuery() { 
//Initialize all our lists - this.filterQueries = new ArrayList(); - this.fieldPresentQueries = new ArrayList(); + this.filterQueries = new ArrayList<>(); + this.fieldPresentQueries = new ArrayList<>(); - this.facetFields = new ArrayList(); - this.facetQueries = new ArrayList(); - this.searchFields = new ArrayList(); - this.hitHighlighting = new HashMap(); + this.facetFields = new ArrayList<>(); + this.facetQueries = new ArrayList<>(); + this.searchFields = new ArrayList<>(); + this.hitHighlighting = new HashMap<>(); //Use a linked hashmap since sometimes insertion order might matter - this.properties = new LinkedHashMap>(); + this.properties = new LinkedHashMap<>(); } @@ -274,7 +272,7 @@ public void setFacetOffset(int facetOffset) { /** * Sets the fields which you want Discovery to return in the search results. * It is HIGHLY recommended to limit the fields returned, as by default - * some backends (like Solr) will return everything. + * some back-ends (like Solr) will return everything. 
* * @param field field to add to the list of fields returned */ @@ -309,7 +307,7 @@ public Map> getProperties() { public void addProperty(String property, String value) { List toAddList = properties.get(property); if (toAddList == null) { - toAddList = new ArrayList(); + toAddList = new ArrayList<>(); } toAddList.add(value); @@ -322,7 +320,7 @@ public DiscoverHitHighlightingField getHitHighlightingField(String field) { } public List getHitHighlightingFields() { - return new ArrayList(hitHighlighting.values()); + return new ArrayList<>(hitHighlighting.values()); } public void addHitHighlightingField(DiscoverHitHighlightingField hitHighlighting) { @@ -368,7 +366,7 @@ public void addYearRangeFacet(DiscoverySearchFilterFacet facet, FacetYearRange f private List buildFacetQueriesWithGap(int newestYear, int oldestYear, String dateFacet, int gap, int topYear, int facetLimit) { - List facetQueries = new LinkedList<>(); + List facetQueries = new ArrayList<>(); for (int year = topYear; year > oldestYear && (facetQueries.size() < facetLimit); year -= gap) { //Add a filter to remove the last year only if we aren't the last year int bottomYear = year - gap; @@ -392,7 +390,7 @@ private List buildFacetQueriesWithGap(int newestYear, int oldestYear, St } private int getTopYear(int newestYear, int gap) { - return (int) (Math.ceil((float) (newestYear) / gap) * gap); + return (int) (Math.ceil((float) newestYear / gap) * gap); } /** diff --git a/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java b/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java index b2bd0fc5ff35..00236d2bfe32 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java +++ b/dspace-api/src/main/java/org/dspace/discovery/DiscoverResult.java @@ -7,6 +7,8 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.SolrServiceImpl.SOLR_FIELD_SUFFIX_FACET_PREFIXES; + import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -109,6 +111,9 
@@ public List getFacetResult(DiscoverySearchFilterFacet field) { if (facetValues.size() == 0 && field.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { facetValues = getFacetResult(field.getIndexFieldName() + ".year"); } + if (facetValues.isEmpty()) { + facetValues = getFacetResult(field.getIndexFieldName() + SOLR_FIELD_SUFFIX_FACET_PREFIXES); + } return ListUtils.emptyIfNull(facetValues); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java index 97187c79ed2d..21468def6866 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java +++ b/dspace-api/src/main/java/org/dspace/discovery/FullTextContentStreams.java @@ -16,9 +16,9 @@ import java.io.SequenceInputStream; import java.nio.charset.StandardCharsets; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; -import java.util.LinkedList; import java.util.List; import javax.annotation.Nullable; @@ -55,7 +55,7 @@ public FullTextContentStreams(Context context, Item parentItem) throws SQLExcept } protected void init(Item parentItem) { - fullTextStreams = new LinkedList<>(); + fullTextStreams = new ArrayList<>(); if (parentItem != null) { sourceInfo = parentItem.getHandle(); @@ -76,14 +76,19 @@ private void buildFullTextList(Item parentItem) { if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) { // a-ha! grab the text out of the bitstreams List bitstreams = myBundle.getBitstreams(); + log.debug("Processing full-text bitstreams. 
Item handle: " + sourceInfo); for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) { fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream)); - log.debug("Added BitStream: " - + fulltextBitstream.getStoreNumber() + " " - + fulltextBitstream.getSequenceID() + " " - + fulltextBitstream.getName()); + if (fulltextBitstream != null) { + log.debug("Added BitStream: " + + fulltextBitstream.getStoreNumber() + " " + + fulltextBitstream.getSequenceID() + " " + + fulltextBitstream.getName()); + } else { + log.error("Found a NULL bitstream when processing full-text files: item handle:" + sourceInfo); + } } } } @@ -149,8 +154,8 @@ private BitstreamService getBitstreamService() { } private class FullTextBitstream { - private String itemHandle; - private Bitstream bitstream; + private final String itemHandle; + private final Bitstream bitstream; public FullTextBitstream(final String parentHandle, final Bitstream file) { this.itemHandle = parentHandle; @@ -158,16 +163,16 @@ public FullTextBitstream(final String parentHandle, final Bitstream file) { } public String getContentType(final Context context) throws SQLException { - BitstreamFormat format = bitstream.getFormat(context); + BitstreamFormat format = bitstream != null ? bitstream.getFormat(context) : null; return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType()); } public String getFileName() { - return StringUtils.trimToEmpty(bitstream.getName()); + return bitstream != null ? StringUtils.trimToEmpty(bitstream.getName()) : null; } public long getSize() { - return bitstream.getSizeBytes(); + return bitstream != null ? 
bitstream.getSizeBytes() : -1; } public InputStream getInputStream() throws SQLException, IOException, AuthorizeException { @@ -179,18 +184,25 @@ public String getItemHandle() { } } - private class FullTextEnumeration implements Enumeration { + /** + * {@link Enumeration} is implemented because instances of this class are + * passed to a JDK class that requires this obsolete type. + */ + @SuppressWarnings("JdkObsolete") + private static class FullTextEnumeration implements Enumeration { private final Iterator fulltextIterator; - public FullTextEnumeration(final Iterator fulltextStreams) { - this.fulltextIterator = fulltextStreams; + public FullTextEnumeration(final Iterator fulltextIterator) { + this.fulltextIterator = fulltextIterator; } + @Override public boolean hasMoreElements() { return fulltextIterator.hasNext(); } + @Override public InputStream nextElement() { InputStream inputStream = null; FullTextBitstream bitstream = null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java index 4e6fa16177e8..b70e9162f7a1 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClient.java @@ -7,14 +7,20 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.IndexClientOptions.TYPE_OPTION; + import java.io.IOException; import java.sql.SQLException; +import java.util.Arrays; import java.util.Iterator; +import java.util.List; import java.util.Optional; import java.util.UUID; +import java.util.stream.Collectors; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.DSpaceObject; @@ -51,41 +57,34 @@ public void internalRun() throws Exception { return; } + String type = null; + if (commandLine.hasOption(TYPE_OPTION)) 
{ + List indexableObjectTypes = IndexObjectFactoryFactory.getInstance().getIndexFactories().stream() + .map((indexFactory -> indexFactory.getType())).collect(Collectors.toList()); + type = commandLine.getOptionValue(TYPE_OPTION); + if (!indexableObjectTypes.contains(type)) { + handler.handleException(String.format("%s is not a valid indexable object type, options: %s", + type, Arrays.toString(indexableObjectTypes.toArray()))); + } + } + /** Acquire from dspace-services in future */ /** * new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer"); */ - if (indexClientOptions == IndexClientOptions.REMOVE) { - handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); - indexer.unIndexContent(context, commandLine.getOptionValue("r")); - } else if (indexClientOptions == IndexClientOptions.CLEAN) { - handler.logInfo("Cleaning Index"); - indexer.cleanIndex(false); - } else if (indexClientOptions == IndexClientOptions.FORCECLEAN) { - handler.logInfo("Cleaning Index"); - indexer.cleanIndex(true); - } else if (indexClientOptions == IndexClientOptions.BUILD || - indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - handler.logInfo("(Re)building index from scratch."); - indexer.createIndex(context); - if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } - } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { - handler.logInfo("Optimizing search core."); - indexer.optimize(); - } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { - checkRebuildSpellCheck(commandLine, indexer); - } else if (indexClientOptions == IndexClientOptions.INDEX) { - final String param = commandLine.getOptionValue('i'); + Optional indexableObject = Optional.empty(); + + if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) { + final String param = indexClientOptions == IndexClientOptions.REMOVE ? 
commandLine.getOptionValue('r') : + commandLine.getOptionValue('i'); UUID uuid = null; try { uuid = UUID.fromString(param); } catch (Exception e) { - // nothing to do, it should be an handle + // nothing to do, it should be a handle } - Optional indexableObject = Optional.empty(); + if (uuid != null) { final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid); if (item != null) { @@ -117,7 +116,36 @@ public void internalRun() throws Exception { if (!indexableObject.isPresent()) { throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object"); } - handler.logInfo("Indexing " + param + " force " + commandLine.hasOption("f")); + } + + if (indexClientOptions == IndexClientOptions.REMOVE) { + handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index"); + indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID()); + } else if (indexClientOptions == IndexClientOptions.CLEAN) { + handler.logInfo("Cleaning Index"); + indexer.cleanIndex(); + } else if (indexClientOptions == IndexClientOptions.DELETE) { + handler.logInfo("Deleting Index"); + indexer.deleteIndex(); + } else if (indexClientOptions == IndexClientOptions.BUILD || + indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + handler.logInfo("(Re)building index from scratch."); + if (StringUtils.isNotBlank(type)) { + handler.logWarning(String.format("Type option, %s, not applicable for entire index rebuild option, b" + + ", type will be ignored", TYPE_OPTION)); + } + indexer.deleteIndex(); + indexer.createIndex(context); + if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } + } else if (indexClientOptions == IndexClientOptions.OPTIMIZE) { + handler.logInfo("Optimizing search core."); + indexer.optimize(); + } else if (indexClientOptions == IndexClientOptions.SPELLCHECK) { + checkRebuildSpellCheck(commandLine, indexer); + } else if (indexClientOptions == 
IndexClientOptions.INDEX) { + handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f")); final long startTimeMillis = System.currentTimeMillis(); final long count = indexAll(indexer, ContentServiceFactory.getInstance(). getItemService(), context, indexableObject.get()); @@ -125,17 +153,15 @@ public void internalRun() throws Exception { handler.logInfo("Indexed " + count + " object" + (count > 1 ? "s" : "") + " in " + seconds + " seconds"); } else if (indexClientOptions == IndexClientOptions.UPDATE || indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) { - handler.logInfo("Updating and Cleaning Index"); - indexer.cleanIndex(false); - indexer.updateIndex(context, false); + handler.logInfo("Updating Index"); + indexer.updateIndex(context, false, type); if (indexClientOptions == IndexClientOptions.UPDATEANDSPELLCHECK) { checkRebuildSpellCheck(commandLine, indexer); } } else if (indexClientOptions == IndexClientOptions.FORCEUPDATE || indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) { - handler.logInfo("Updating and Cleaning Index"); - indexer.cleanIndex(true); - indexer.updateIndex(context, true); + handler.logInfo("Updating Index"); + indexer.updateIndex(context, true, type); if (indexClientOptions == IndexClientOptions.FORCEUPDATEANDSPELLCHECK) { checkRebuildSpellCheck(commandLine, indexer); } @@ -180,7 +206,7 @@ private static long indexAll(final IndexingService indexingService, indexingService.indexContent(context, dso, true, true); count++; if (dso.getIndexedObject() instanceof Community) { - final Community community = (Community) dso; + final Community community = (Community) dso.getIndexedObject(); final String communityHandle = community.getHandle(); for (final Community subcommunity : community.getSubcommunities()) { count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity)); diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java b/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java index 4b29fbbf27c5..0de5b22d0655 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexClientOptions.java @@ -8,8 +8,13 @@ package org.dspace.discovery; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Options; +import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; /** * This Enum holds all the possible options and combinations for the Index discovery script @@ -17,7 +22,7 @@ public enum IndexClientOptions { REMOVE, CLEAN, - FORCECLEAN, + DELETE, BUILD, BUILDANDSPELLCHECK, OPTIMIZE, @@ -29,6 +34,8 @@ public enum IndexClientOptions { FORCEUPDATEANDSPELLCHECK, HELP; + public static final String TYPE_OPTION = "t"; + /** * This method resolves the CommandLine parameters to figure out which action the index-discovery script should * perform @@ -41,11 +48,9 @@ protected static IndexClientOptions getIndexClientOption(CommandLine commandLine } else if (commandLine.hasOption("r")) { return IndexClientOptions.REMOVE; } else if (commandLine.hasOption("c")) { - if (commandLine.hasOption("f")) { - return IndexClientOptions.FORCECLEAN; - } else { - return IndexClientOptions.CLEAN; - } + return IndexClientOptions.CLEAN; + } else if (commandLine.hasOption("d")) { + return IndexClientOptions.DELETE; } else if (commandLine.hasOption("b")) { if (commandLine.hasOption("s")) { return IndexClientOptions.BUILDANDSPELLCHECK; @@ -73,25 +78,24 @@ protected static IndexClientOptions getIndexClientOption(CommandLine commandLine protected static Options constructOptions() { Options options = new Options(); + List indexableObjectTypes = IndexObjectFactoryFactory.getInstance().getIndexFactories().stream() + .map((indexFactory -> 
indexFactory.getType())).collect(Collectors.toList()); options .addOption("r", "remove", true, "remove an Item, Collection or Community from index based on its handle"); - options.getOption("r").setType(String.class); options.addOption("i", "index", true, "add or update an Item, Collection or Community based on its handle or uuid"); - options.getOption("i").setType(boolean.class); + options.addOption(TYPE_OPTION, "type", true, "reindex only specific type of " + + "(re)indexable objects; options: " + Arrays.toString(indexableObjectTypes.toArray())); options.addOption("c", "clean", false, "clean existing index removing any documents that no longer exist in the db"); - options.getOption("c").setType(boolean.class); + options.addOption("d", "delete", false, + "delete all records from existing index"); options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists"); - options.getOption("b").setType(boolean.class); options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f."); - options.getOption("s").setType(boolean.class); options.addOption("f", "force", false, "if updating existing index, force each handle to be reindexed even if uptodate"); - options.getOption("f").setType(boolean.class); options.addOption("h", "help", false, "print this help message"); - options.getOption("h").setType(boolean.class); return options; } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java index 8bf3cf2aba62..8707b733a637 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.discovery; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import 
org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link IndexClient} script */ public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -30,15 +22,6 @@ public Class getDspaceRunnableClass() { return dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java index 5f1f8b0b0e90..80602ac80459 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java @@ -7,6 +7,7 @@ */ package org.dspace.discovery; +import java.sql.SQLException; import java.util.HashSet; import java.util.Optional; import java.util.Set; @@ -37,6 +38,8 @@ public class IndexEventConsumer implements Consumer { // collect Items, Collections, Communities that need indexing private Set objectsToUpdate = new HashSet<>(); + // collect freshly created Items that need indexing (requires pre-db status) + private Set createdItemsToUpdate = new HashSet<>(); // unique search IDs to delete private Set uniqueIdsToDelete = new HashSet<>(); @@ -65,6 +68,7 @@ public void consume(Context ctx, Event event) throws Exception { if (objectsToUpdate == null) { objectsToUpdate = new HashSet<>(); uniqueIdsToDelete = new HashSet<>(); + createdItemsToUpdate = new 
HashSet<>(); } int st = event.getSubjectType(); @@ -143,13 +147,18 @@ public void consume(Context ctx, Event event) throws Exception { String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString(); uniqueIdsToDelete.add(detail); } + objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject)); } break; case Event.REMOVE: case Event.ADD: - if (object == null) { + // At this time, ADD and REMOVE actions are ignored on SITE object. They are only triggered for + // top-level communities. No action is necessary as Community itself is indexed (or deleted) separately. + if (event.getSubjectType() == Constants.SITE) { + log.debug(event.getEventTypeAsString() + " event triggered for Site object. Skipping it."); + } else if (object == null) { log.warn(event.getEventTypeAsString() + " event, could not get object for " + event.getObjectTypeAsString() + " id=" + event.getObjectID() @@ -162,7 +171,7 @@ public void consume(Context ctx, Event event) throws Exception { // also update the object in order to index mapped/unmapped Items if (subject != null && subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) { - objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object)); + createdItemsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object)); } } break; @@ -196,6 +205,10 @@ public void consume(Context ctx, Event event) throws Exception { @Override public void end(Context ctx) throws Exception { + // Change the mode to readonly to improve performance + Context.Mode originalMode = ctx.getCurrentMode(); + ctx.setMode(Context.Mode.READ_ONLY); + try { for (String uid : uniqueIdsToDelete) { try { @@ -209,23 +222,11 @@ public void end(Context ctx) throws Exception { } // update the changed Items not deleted because they were on create list for (IndexableObject iu : objectsToUpdate) { - /* we let all types through here and - * allow the search indexer to make - * 
decisions on indexing and/or removal - */ - iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject())); - String uniqueIndexID = iu.getUniqueIndexID(); - if (uniqueIndexID != null) { - try { - indexer.indexContent(ctx, iu, true, false); - log.debug("Indexed " - + iu.getTypeText() - + ", id=" + iu.getID() - + ", unique_id=" + uniqueIndexID); - } catch (Exception e) { - log.error("Failed while indexing object: ", e); - } - } + indexObject(ctx, iu, false); + } + // update the created Items with a pre-db status + for (IndexableObject iu : createdItemsToUpdate) { + indexObject(ctx, iu, true); } } finally { if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) { @@ -235,6 +236,29 @@ public void end(Context ctx) throws Exception { // "free" the resources objectsToUpdate.clear(); uniqueIdsToDelete.clear(); + createdItemsToUpdate.clear(); + } + + ctx.setMode(originalMode); + } + } + + private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException { + /* we let all types through here and + * allow the search indexer to make + * decisions on indexing and/or removal + */ + iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject())); + String uniqueIndexID = iu.getUniqueIndexID(); + if (uniqueIndexID != null) { + try { + indexer.indexContent(ctx, iu, true, false, preDb); + log.debug("Indexed " + + iu.getTypeText() + + ", id=" + iu.getID() + + ", unique_id=" + uniqueIndexID); + } catch (Exception e) { + log.error("Failed while indexing object: ", e); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java index 46795d759e45..2ef5affa47b7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexingService.java @@ -9,7 +9,9 @@ import java.io.IOException; import java.sql.SQLException; +import java.util.Map; +import org.apache.solr.client.solrj.SolrServerException; 
import org.dspace.core.Context; /** @@ -30,6 +32,17 @@ void indexContent(Context context, IndexableObject dso, void indexContent(Context context, IndexableObject dso, boolean force, boolean commit) throws SQLException, SearchServiceException; + /** + * Index a given DSO + * @param context The DSpace Context + * @param dso The DSpace Object to index + * @param force Force update even if not stale + * @param commit Commit the changes + * @param preDb Add a "preDB" status to the index (only applicable to Items) + */ + void indexContent(Context context, IndexableObject dso, + boolean force, boolean commit, boolean preDb) throws SQLException, SearchServiceException; + void unIndexContent(Context context, IndexableObject dso) throws SQLException, IOException; @@ -53,12 +66,24 @@ void reIndexContent(Context context, IndexableObject dso) void updateIndex(Context context, boolean force, String type); - void cleanIndex(boolean force) throws IOException, - SQLException, SearchServiceException; + void cleanIndex() throws IOException, SQLException, SearchServiceException; + + void deleteIndex(); void commit() throws SearchServiceException; void optimize() throws SearchServiceException; void buildSpellCheck() throws SearchServiceException, IOException; + + /** + * Atomically update the index of a single field for an object + * @param context The DSpace context + * @param uniqueIndexId The unqiue index ID of the object to update the index for + * @param field The field to update + * @param fieldModifier The modifiers for the field to update. 
More information on how to atomically update a solr + * field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/ + */ + void atomicUpdate(Context context, String uniqueIndexId, String field, Map fieldModifier) + throws SolrServerException, IOException; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java new file mode 100644 index 000000000000..aa90ccf4a371 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexingUtils.java @@ -0,0 +1,123 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.core.Context; + +/** + * Util methods used by indexing. + * + * @author Koen Pauwels (koen.pauwels at atmire dot com) + */ +public class IndexingUtils { + private IndexingUtils() { + } + + /** + * Retrieve all ancestor communities of a given community, with the first one being the given community and the + * last one being the root. + *

    + * + * @param context DSpace context object + * @param community Community for which we search the ancestors + * @return A list of ancestor communities. + * @throws SQLException if database error + */ + static List getAncestorCommunities(Context context, Community community) throws SQLException { + ArrayList communities = new ArrayList<>(); + while (community != null) { + communities.add(community); + community = (Community) ContentServiceFactory.getInstance().getDSpaceObjectService(community) + .getParentObject(context, community); + } + return communities; + } + + /** + * Retrieve the ids of all groups that have ADMIN rights to the given community, either directly + * (through direct resource policy) or indirectly (through a policy on an ancestor community). + * + * @param context DSpace context object + * @param community Community for which we search the admin group IDs + * @return A list of admin group IDs + * @throws SQLException if database error + */ + static List findTransitiveAdminGroupIds(Context context, Community community) throws SQLException { + return getAncestorCommunities(context, community).stream() + .filter(parent -> parent.getAdministrators() != null) + .map(parent -> parent.getAdministrators().getID()) + .collect(Collectors.toList()); + } + + /** + * Retrieve the ids of all groups that have ADMIN rights to the given collection, either directly + * (through direct resource policy) or indirectly (through a policy on its community, or one of + * its ancestor communities). 
+ * + * @param context DSpace context object + * @param collection Collection for which we search the admin group IDs + * @return A list of admin group IDs + * @throws SQLException if database error + */ + static List findTransitiveAdminGroupIds(Context context, Collection collection) throws SQLException { + List ids = new ArrayList<>(); + if (collection.getAdministrators() != null) { + ids.add(collection.getAdministrators().getID()); + } + for (Community community : collection.getCommunities()) { + for (UUID id : findTransitiveAdminGroupIds(context, community)) { + ids.add(id); + } + } + return ids; + } + + /** + * Retrieve group and eperson IDs for all groups and eperson who have _any_ of the given authorizations + * on the given DSpaceObject. The resulting IDs are prefixed with "e" in the case of an eperson ID, and "g" in the + * case of a group ID. + * + * @param authService The authentication service + * @param context DSpace context object + * @param obj DSpaceObject for which we search the admin group IDs + * @return A stream of admin group IDs as Strings, prefixed with either "e" or "g", depending on whether it is a + * group or eperson ID. + * @throws SQLException if database error + */ + static List findDirectlyAuthorizedGroupAndEPersonPrefixedIds( + AuthorizeService authService, Context context, DSpaceObject obj, int[] authorizations) + throws SQLException { + ArrayList prefixedIds = new ArrayList<>(); + for (int auth : authorizations) { + for (ResourcePolicy policy : authService.getPoliciesActionFilter(context, obj, auth)) { + // Avoid NPE in cases where the policy does not have group or eperson + if (policy.getGroup() == null && policy.getEPerson() == null) { + continue; + } + String prefixedId = policy.getGroup() == null + ? 
"e" + policy.getEPerson().getID() + : "g" + policy.getGroup().getID(); + prefixedIds.add(prefixedId); + context.uncacheEntity(policy); + } + } + return prefixedIds; + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchService.java b/dspace-api/src/main/java/org/dspace/discovery/SearchService.java index 9b6ac0109d40..cb945648e7cd 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchService.java @@ -8,6 +8,7 @@ package org.dspace.discovery; import java.sql.SQLException; +import java.util.Iterator; import java.util.List; import org.dspace.content.Item; @@ -38,6 +39,7 @@ public interface SearchService { DiscoverResult search(Context context, DiscoverQuery query) throws SearchServiceException; + /** * Convenient method to call @see #search(Context, DSpaceObject, * DiscoverQuery, boolean) with includeWithdrawn=false @@ -52,9 +54,22 @@ DiscoverResult search(Context context, DiscoverQuery query) DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query) throws SearchServiceException; + /** + * Convenience method to call @see #search(Context, DSpaceObject, DiscoverQuery) and getting an iterator for the + * results + * + * @param context DSpace context object + * @param dso a DSpace object to use as a scope of the search + * @param query the discovery query object + * @return an iterator iterating over all results from the search + * @throws SearchServiceException if search error + */ + Iterator iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query) + throws SearchServiceException; + List search(Context context, String query, String orderfield, boolean ascending, int offset, - int max, String... filterquery); + int max, String... 
filterquery); /** * Transforms the given string field and value into a filter query diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 90afb09eca99..60bf52836bef 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,8 +18,12 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.utils.DiscoverQueryBuilder; import org.dspace.kernel.ServiceManager; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.workflow.WorkflowItem; @@ -51,6 +55,9 @@ public class SearchUtils { */ private SearchUtils() { } + /** + * Return an instance of the {@link SearchService}. + */ public static SearchService getSearchService() { if (searchService == null) { org.dspace.kernel.ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); @@ -59,35 +66,90 @@ public static SearchService getSearchService() { return searchService; } + /** + * Clear the cached {@link SearchService} instance, forcing it to be retrieved from the service manager again + * next time {@link SearchUtils#getSearchService} is called. + * In practice, this is only necessary for integration tests in some environments + * where the cached version may no longer be up to date between tests. + */ + public static void clearCachedSearchService() { + searchService = null; + } + + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. 
+ * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. + * @param context + * the dabase context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * + * + * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? 
dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -99,6 +161,18 @@ public static DiscoveryConfiguration getDiscoveryConfigurationByName( return configurationService.getDiscoveryConfiguration(configurationName); } + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ + public static DiscoveryConfiguration 
getDiscoveryConfigurationByDSO( + Context context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryDSOConfiguration(context, dso); + } + public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); return manager @@ -113,47 +187,55 @@ public static List getIgnoredMetadataFields(int type) { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * + * @param context the database context * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Context context, Item item) + throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the provided workflow item 
+ * + * @param context * @param witem a workflow item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations @@ -170,4 +252,10 @@ private static void addConfigurationIfExists(Set result, DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName); result.add(configurationExtra); } + + public static DiscoverQueryBuilder getQueryBuilder() { + ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); + return manager + .getServiceByName(DiscoverQueryBuilder.class.getName(), DiscoverQueryBuilder.class); + } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java b/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java index 47288ece34ff..f31feab6123a 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrSearchCore.java @@ -8,6 +8,7 @@ package org.dspace.discovery; import java.io.IOException; +import javax.inject.Named; import 
org.apache.commons.validator.routines.UrlValidator; import org.apache.logging.log4j.LogManager; @@ -18,13 +19,13 @@ import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.service.impl.HttpConnectionPoolService; import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.storage.rdbms.DatabaseUtils; import org.springframework.beans.factory.annotation.Autowired; /** - * Bean containing the SolrClient for the search core + * Bean containing the SolrClient for the search core. * @author Kevin Van de Velde (kevin at atmire dot com) */ public class SolrSearchCore { @@ -34,6 +35,8 @@ public class SolrSearchCore { protected IndexingService indexingService; @Autowired protected ConfigurationService configurationService; + @Autowired @Named("solrHttpConnectionPoolService") + protected HttpConnectionPoolService httpConnectionPoolService; /** * SolrServer for processing indexing events. 
@@ -71,15 +74,16 @@ public SolrClient getSolr() { */ protected void initSolr() { if (solr == null) { - String solrService = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("discovery.search.server"); + String solrService = configurationService.getProperty("discovery.search.server"); UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS); if (urlValidator.isValid(solrService) || configurationService .getBooleanProperty("discovery.solr.url.validation.enabled", true)) { try { - log.debug("Solr URL: " + solrService); - HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build(); + log.debug("Solr URL: {}", solrService); + HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService) + .withHttpClient(httpConnectionPoolService.getClient()) + .build(); solrServer.setBaseURL(solrService); solrServer.setUseMultiPartPost(true); @@ -97,10 +101,13 @@ protected void initSolr() { solr = solrServer; } catch (SolrServerException | IOException e) { - log.error("Error while initializing solr server", e); + log.error("Error while initializing solr server {}", + solrService, e); + throw new RuntimeException("Failed to contact Solr at " + solrService + + " : " + e.getMessage()); } } else { - log.error("Error while initializing solr, invalid url: " + solrService); + log.error("Error while initializing solr, invalid url: {}", solrService); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 3f5e765b0ed4..7aece5acf313 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,10 +53,20 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { 
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + // Add _keyword and _filter fields which are necessary to support filtering and faceting + // for the file names + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); - if ((description != null) && (!description.isEmpty())) { + if ((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + // Add _keyword and _filter fields which are necessary to support filtering and + // faceting for the descriptions + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", + description); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", + description); } } } @@ -65,4 +75,4 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So } } } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index 0791824085d7..cd3797e3e34e 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -8,6 +8,8 @@ package org.dspace.discovery; import static java.util.stream.Collectors.joining; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB; import java.io.IOException; import java.io.PrintWriter; @@ -59,7 +61,7 @@ import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.discovery.configuration.DiscoveryConfiguration; import 
org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryMoreLikeThisConfiguration; @@ -103,6 +105,10 @@ public class SolrServiceImpl implements SearchService, IndexingService { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrServiceImpl.class); + // Suffix of the solr field used to index the facet/filter so that the facet search can search all word in a + // facet by indexing "each word to end of value' partial value + public static final String SOLR_FIELD_SUFFIX_FACET_PREFIXES = "_prefix"; + @Autowired protected ContentServiceFactory contentServiceFactory; @Autowired @@ -118,8 +124,6 @@ protected SolrServiceImpl() { } - - /** * If the handle for the "dso" already exists in the index, and the "dso" * has a lastModified timestamp that is newer than the document in the index @@ -153,7 +157,7 @@ public void indexContent(Context context, IndexableObject indexableObject, getIndexableObjectFactory(indexableObject); if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) { update(context, indexableObjectFactory, indexableObject); - log.info(LogManager.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID())); + log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID())); } } catch (IOException | SQLException | SolrServerException | SearchServiceException e) { log.error(e.getMessage(), e); @@ -166,6 +170,24 @@ protected void update(Context context, IndexFactory indexableObjectService, indexableObjectService.writeDocument(context, indexableObject, solrInputDocument); } + /** + * Update the given indexable object using a given service + * @param context The DSpace Context + * @param indexableObjectService The service to index the object with + * @param indexableObject The object to index + * @param preDB Add a "preDB" status to the document + */ + protected void update(Context 
context, IndexFactory indexableObjectService, IndexableObject indexableObject, + boolean preDB) throws IOException, SQLException, SolrServerException { + if (preDB) { + final SolrInputDocument solrInputDocument = + indexableObjectService.buildNewDocument(context, indexableObject); + indexableObjectService.writeDocument(context, indexableObject, solrInputDocument); + } else { + update(context, indexableObjectService, indexableObject); + } + } + /** * unIndex removes an Item, Collection, or Community * @@ -234,7 +256,12 @@ public void unIndexContent(Context context, String searchUniqueID, boolean commi try { if (solrSearchCore.getSolr() != null) { - indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID).delete(searchUniqueID); + IndexFactory index = indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID); + if (index != null) { + index.delete(searchUniqueID); + } else { + log.warn("Object not found in Solr index: " + searchUniqueID); + } if (commit) { solrSearchCore.getSolr().commit(); } @@ -333,17 +360,31 @@ public void updateIndex(Context context, boolean force, String type) { } } + /** + * Removes all documents from the Lucene index + */ + public void deleteIndex() { + try { + final List indexableObjectServices = indexObjectServiceFactory. + getIndexFactories(); + for (IndexFactory indexableObjectService : indexableObjectServices) { + indexableObjectService.deleteAll(); + } + } catch (IOException | SolrServerException e) { + log.error("Error cleaning discovery index: " + e.getMessage(), e); + } + } + /** * Iterates over all documents in the Lucene index and verifies they are in * database, if not, they are removed. 
* - * @param force whether or not to force a clean index * @throws IOException IO exception * @throws SQLException sql exception * @throws SearchServiceException occurs when something went wrong with querying the solr server */ @Override - public void cleanIndex(boolean force) throws IOException, SQLException, SearchServiceException { + public void cleanIndex() throws IOException, SQLException, SearchServiceException { Context context = new Context(); context.turnOffAuthorisationSystem(); @@ -351,56 +392,48 @@ public void cleanIndex(boolean force) throws IOException, SQLException, SearchSe if (solrSearchCore.getSolr() == null) { return; } - if (force) { - final List indexableObjectServices = indexObjectServiceFactory. - getIndexFactories(); - for (IndexFactory indexableObjectService : indexableObjectServices) { - indexableObjectService.deleteAll(); - } - } else { - // First, we'll just get a count of the total results - SolrQuery countQuery = new SolrQuery("*:*"); - countQuery.setRows(0); // don't actually request any data - // Get the total amount of results - QueryResponse totalResponse = solrSearchCore.getSolr().query(countQuery, - solrSearchCore.REQUEST_METHOD); - long total = totalResponse.getResults().getNumFound(); - - int start = 0; - int batch = 100; - - // Now get actual Solr Documents in batches - SolrQuery query = new SolrQuery(); - query.setFields(SearchUtils.RESOURCE_UNIQUE_ID, SearchUtils.RESOURCE_ID_FIELD, - SearchUtils.RESOURCE_TYPE_FIELD); - query.addSort(SearchUtils.RESOURCE_UNIQUE_ID, SolrQuery.ORDER.asc); - query.setQuery("*:*"); - query.setRows(batch); - // Keep looping until we hit the total number of Solr docs - while (start < total) { - query.setStart(start); - QueryResponse rsp = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD); - SolrDocumentList docs = rsp.getResults(); - - for (SolrDocument doc : docs) { - String uniqueID = (String) doc.getFieldValue(SearchUtils.RESOURCE_UNIQUE_ID); - - IndexableObject o = 
findIndexableObject(context, doc); - - if (o == null) { - log.info("Deleting: " + uniqueID); - /* - * Use IndexWriter to delete, its easier to manage - * write.lock - */ - unIndexContent(context, uniqueID); - } else { - log.debug("Keeping: " + o.getUniqueIndexID()); - } + // First, we'll just get a count of the total results + SolrQuery countQuery = new SolrQuery("*:*"); + countQuery.setRows(0); // don't actually request any data + // Get the total amount of results + QueryResponse totalResponse = solrSearchCore.getSolr().query(countQuery, + solrSearchCore.REQUEST_METHOD); + long total = totalResponse.getResults().getNumFound(); + + int start = 0; + int batch = 100; + + // Now get actual Solr Documents in batches + SolrQuery query = new SolrQuery(); + query.setFields(SearchUtils.RESOURCE_UNIQUE_ID, SearchUtils.RESOURCE_ID_FIELD, + SearchUtils.RESOURCE_TYPE_FIELD); + query.addSort(SearchUtils.RESOURCE_UNIQUE_ID, SolrQuery.ORDER.asc); + query.setQuery("*:*"); + query.setRows(batch); + // Keep looping until we hit the total number of Solr docs + while (start < total) { + query.setStart(start); + QueryResponse rsp = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD); + SolrDocumentList docs = rsp.getResults(); + + for (SolrDocument doc : docs) { + String uniqueID = (String) doc.getFieldValue(SearchUtils.RESOURCE_UNIQUE_ID); + + IndexableObject o = findIndexableObject(context, doc); + + if (o == null) { + log.info("Deleting: " + uniqueID); + /* + * Use IndexWriter to delete, its easier to manage + * write.lock + */ + unIndexContent(context, uniqueID); + } else { + log.debug("Keeping: " + o.getUniqueIndexID()); } - - start += batch; } + + start += batch; } } catch (IOException | SQLException | SolrServerException e) { log.error("Error cleaning discovery index: " + e.getMessage(), e); @@ -448,6 +481,16 @@ public void buildSpellCheck() } } + @Override + public void atomicUpdate(Context context, String uniqueIndexId, String field, Map fieldModifier) + 
throws SolrServerException, IOException { + SolrInputDocument solrInputDocument = new SolrInputDocument(); + solrInputDocument.addField(SearchUtils.RESOURCE_UNIQUE_ID, uniqueIndexId); + solrInputDocument.addField(field, fieldModifier); + + solrSearchCore.getSolr().add(solrInputDocument); + } + // ////////////////////////////////// // Private // ////////////////////////////////// @@ -704,16 +747,21 @@ public DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery discoveryQuery.addFilterQueries("location:l" + dso.getID()); } else if (dso instanceof IndexableItem) { discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso. - getUniqueIndexID()); + getUniqueIndexID()); } } return search(context, discoveryQuery); } + @Override + public Iterator iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query) + throws SearchServiceException { + return new SearchIterator(context, dso, query); + } @Override - public DiscoverResult search(Context context, DiscoverQuery discoveryQuery ) + public DiscoverResult search(Context context, DiscoverQuery discoveryQuery) throws SearchServiceException { try { if (solrSearchCore.getSolr() == null) { @@ -727,6 +775,72 @@ public DiscoverResult search(Context context, DiscoverQuery discoveryQuery ) } } + /** + * This class implements an iterator over items that is specifically used to iterate over search results + */ + private class SearchIterator implements Iterator { + private Context context; + private DiscoverQuery discoverQuery; + private DiscoverResult discoverResult; + private IndexableObject dso; + private int absoluteCursor; + private int relativeCursor; + private int pagesize; + + SearchIterator(Context context, DiscoverQuery discoverQuery) throws SearchServiceException { + this.context = context; + this.discoverQuery = discoverQuery; + this.absoluteCursor = discoverQuery.getStart(); + initialise(); + } + + SearchIterator(Context context, IndexableObject dso, DiscoverQuery 
discoverQuery) + throws SearchServiceException { + this.context = context; + this.dso = dso; + this.discoverQuery = discoverQuery; + initialise(); + } + + private void initialise() throws SearchServiceException { + this.relativeCursor = 0; + if (discoverQuery.getMaxResults() != -1) { + pagesize = discoverQuery.getMaxResults(); + } else { + pagesize = 10; + } + discoverQuery.setMaxResults(pagesize); + this.discoverResult = search(context, dso, discoverQuery); + } + + @Override + public boolean hasNext() { + return absoluteCursor < discoverResult.getTotalSearchResults(); + } + + @Override + public Item next() { + //paginate getting results from the discoverquery. + if (relativeCursor == pagesize) { + // get a new page of results when the last element of the previous page has been read + int offset = absoluteCursor; + // reset the position counter for getting element relativecursor on a page + relativeCursor = 0; + discoverQuery.setStart(offset); + try { + discoverResult = search(context, dso, discoverQuery); + } catch (SearchServiceException e) { + log.error("error while getting search results", e); + } + } + // get the element at position relativecursor on a page + IndexableObject res = discoverResult.getIndexableObjects().get(relativeCursor); + relativeCursor++; + absoluteCursor++; + return (Item) res.getIndexedObject(); + } + } + protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery) throws SearchServiceException { SolrQuery solrQuery = new SolrQuery(); @@ -747,6 +861,7 @@ protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQ solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD); solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD); solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID); + solrQuery.addField(STATUS_FIELD); if (discoveryQuery.isSpellCheck()) { solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query); @@ -799,6 +914,9 @@ protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQ 
//Only add facet information if there are any facets for (DiscoverFacetField facetFieldConfig : facetFields) { String field = transformFacetField(facetFieldConfig, facetFieldConfig.getField(), false); + if (facetFieldConfig.getPrefix() != null) { + field = transformPrefixFacetField(facetFieldConfig, facetFieldConfig.getField(), false); + } solrQuery.addFacetField(field); // Setting the facet limit in this fashion ensures that each facet can have its own max @@ -873,7 +991,7 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) // if we found stale objects we can decide to skip execution of the remaining code to improve performance boolean skipLoadingResponse = false; // use zombieDocs to collect stale found objects - List zombieDocs = new ArrayList(); + List zombieDocs = new ArrayList<>(); QueryResponse solrQueryResponse = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD); if (solrQueryResponse != null) { @@ -890,18 +1008,21 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) result.addIndexableObject(indexableObject); } else { // log has warn because we try to fix the issue - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "Stale entry found in Discovery index," + " as we could not find the DSpace object it refers to. ", "Unique identifier: " + doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID))); // Enables solr to remove documents related to items not on database anymore (Stale) // if maxAttemps is greater than 0 cleanup the index on each step if (maxAttempts >= 0) { - zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)); - // avoid to process the response except if we are in the last allowed execution. 
- // When maxAttempts is 0 this will be just the first and last run as the - // executionCount is increased at the start of the loop it will be equals to 1 - skipLoadingResponse = maxAttempts + 1 != executionCount; + Object statusObj = doc.getFirstValue(STATUS_FIELD); + if (!(statusObj instanceof String && statusObj.equals(STATUS_FIELD_PREDB))) { + zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)); + // avoid to process the response except if we are in the last allowed execution. + // When maxAttempts is 0 this will be just the first and last run as the + // executionCount is increased at the start of the loop it will be equals to 1 + skipLoadingResponse = maxAttempts + 1 != executionCount; + } } continue; } @@ -910,9 +1031,8 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) // Add information about our search fields for (String field : searchFields) { List valuesAsString = new ArrayList<>(); - for (Object o : doc.getFieldValues(field)) { - valuesAsString.add(String.valueOf(o)); - } + Optional.ofNullable(doc.getFieldValues(field)) + .ifPresent(l -> l.forEach(o -> valuesAsString.add(String.valueOf(o)))); resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()])); } result.addSearchDocument(indexableObject, resultDoc); @@ -924,12 +1044,6 @@ protected DiscoverResult retrieveResult(Context context, DiscoverQuery query) //We need to remove all the "_hl" appendix strings from our keys Map> resultMap = new HashMap<>(); for (String key : highlightedFields.keySet()) { - List highlightOriginalValue = highlightedFields.get(key); - List resultHighlightOriginalValue = new ArrayList<>(); - for (String highlightValue : highlightOriginalValue) { - String[] splitted = highlightValue.split("###"); - resultHighlightOriginalValue.add(splitted); - } resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key)); } @@ -945,7 +1059,7 @@ protected DiscoverResult 
retrieveResult(Context context, DiscoverQuery query) // If any stale entries are found in the current page of results, // we remove those stale entries and rerun the same query again. // Otherwise, the query is valid and the results are returned. - if (zombieDocs.size() != 0) { + if (!zombieDocs.isEmpty()) { log.info("Cleaning " + zombieDocs.size() + " stale objects from Discovery Index"); log.info("ZombieDocs "); zombieDocs.forEach(log::info); @@ -1116,7 +1230,7 @@ public List search(Context context, String query, String orderf } catch (IOException | SQLException | SolrServerException e) { // Any acception that we get ignore it. // We do NOT want any crashed to shown by the user - log.error(LogManager.getHeader(context, "Error while quering solr", "Query: " + query), e); + log.error(LogHelper.getHeader(context, "Error while quering solr", "Query: " + query), e); return new ArrayList<>(0); } } @@ -1168,7 +1282,7 @@ public DiscoverFilterQuery toFilterQuery(Context context, String field, String o //DO NOT ESCAPE RANGE QUERIES ! 
if (!value.matches("\\[.*TO.*\\]")) { value = ClientUtils.escapeQueryChars(value); - filterQuery.append("(").append(value).append(")"); + filterQuery.append("\"").append(value).append("\""); } else { filterQuery.append(value); } @@ -1224,8 +1338,7 @@ public Object transform(Object input) { } } } catch (IOException | SQLException | SolrServerException e) { - log.error( - LogManager.getHeader(context, "Error while retrieving related items", "Handle: " + log.error(LogHelper.getHeader(context, "Error while retrieving related items", "Handle: " + item.getHandle()), e); } return results; @@ -1242,7 +1355,31 @@ public String toSortFieldIndex(String metadataField, String type) { } } + /** + * Gets the solr field that contains the facet value split on each word break to the end, so can be searched + * on each word in the value, see {@link org.dspace.discovery.indexobject.ItemIndexFactoryImpl + * #saveFacetPrefixParts(SolrInputDocument, DiscoverySearchFilter, String, String)} + * Ony applicable to facets of type {@link DiscoveryConfigurationParameters.TYPE_TEXT}, otherwise uses the regular + * facet filter field + */ + protected String transformPrefixFacetField(DiscoverFacetField facetFieldConfig, String field, + boolean removePostfix) { + if (facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_TEXT) || + facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_HIERARCHICAL)) { + if (removePostfix) { + return field.substring(0, field.lastIndexOf(SOLR_FIELD_SUFFIX_FACET_PREFIXES)); + } else { + return field + SOLR_FIELD_SUFFIX_FACET_PREFIXES; + } + } else { + return this.transformFacetField(facetFieldConfig, field, removePostfix); + } + } + protected String transformFacetField(DiscoverFacetField facetFieldConfig, String field, boolean removePostfix) { + if (field.contains(SOLR_FIELD_SUFFIX_FACET_PREFIXES)) { + return this.transformPrefixFacetField(facetFieldConfig, field, removePostfix); + } if 
(facetFieldConfig.getType().equals(DiscoveryConfigurationParameters.TYPE_TEXT)) { if (removePostfix) { return field.substring(0, field.lastIndexOf("_filter")); @@ -1288,7 +1425,7 @@ protected String transformDisplayedValue(Context context, String field, String v if (field.equals("location.comm") || field.equals("location.coll")) { value = locationToName(context, field, value); } else if (field.endsWith("_filter") || field.endsWith("_ac") - || field.endsWith("_acid")) { + || field.endsWith("_acid") || field.endsWith(SOLR_FIELD_SUFFIX_FACET_PREFIXES)) { //We have a filter make sure we split ! String separator = DSpaceServicesFactory.getInstance().getConfigurationService() .getProperty("discovery.solr.facets.split.char"); @@ -1320,7 +1457,7 @@ protected String transformAuthorityValue(Context context, String field, String v return value; } if (field.endsWith("_filter") || field.endsWith("_ac") - || field.endsWith("_acid")) { + || field.endsWith("_acid") || field.endsWith(SOLR_FIELD_SUFFIX_FACET_PREFIXES)) { //We have a filter make sure we split ! String separator = DSpaceServicesFactory.getInstance().getConfigurationService() .getProperty("discovery.solr.facets.split.char"); @@ -1384,6 +1521,28 @@ public void indexContent(Context context, IndexableObject dso, boolean force, } } + @Override + public void indexContent(Context context, IndexableObject indexableObject, boolean force, + boolean commit, boolean preDb) throws SearchServiceException, SQLException { + if (preDb) { + try { + final IndexFactory indexableObjectFactory = indexObjectServiceFactory. 
+ getIndexableObjectFactory(indexableObject); + if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) { + update(context, indexableObjectFactory, indexableObject, true); + log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID())); + } + } catch (IOException | SQLException | SolrServerException | SearchServiceException e) { + log.error(e.getMessage(), e); + } + } else { + indexContent(context, indexableObject, force); + } + if (commit) { + commit(); + } + } + @Override public void commit() throws SearchServiceException { try { @@ -1437,4 +1596,5 @@ public String calculateExtremeValue(Context context, String valueField, } return null; } + } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java index 8c5f92bd97bb..ee93f954a5bd 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexCollectionSubmittersPlugin.java @@ -7,19 +7,20 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.IndexingUtils.findDirectlyAuthorizedGroupAndEPersonPrefixedIds; +import static org.dspace.discovery.IndexingUtils.findTransitiveAdminGroupIds; + import java.sql.SQLException; import java.util.List; +import java.util.UUID; import org.apache.logging.log4j.Logger; import org.apache.solr.common.SolrInputDocument; -import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import 
org.dspace.discovery.indexobject.IndexableCollection; import org.springframework.beans.factory.annotation.Autowired; @@ -42,36 +43,27 @@ public void additionalIndex(Context context, IndexableObject idxObj, SolrInputDo Collection col = ((IndexableCollection) idxObj).getIndexedObject(); if (col != null) { try { - String fieldValue = null; - Community parent = (Community) ContentServiceFactory.getInstance().getDSpaceObjectService(col) - .getParentObject(context, col); - while (parent != null) { - if (parent.getAdministrators() != null) { - fieldValue = "g" + parent.getAdministrators().getID(); - document.addField("submit", fieldValue); - } - parent = (Community) ContentServiceFactory.getInstance().getDSpaceObjectService(parent) - .getParentObject(context, parent); + // Index groups with ADMIN rights on the Collection, on + // Communities containing those Collections, and recursively on any Community containing such a + // Community. + // TODO: Strictly speaking we should also check for epersons who received admin rights directly, + // without being part of the admin group. Finding them may be a lot slower though. + for (UUID unprefixedId : findTransitiveAdminGroupIds(context, col)) { + document.addField("submit", "g" + unprefixedId); } - List policies = authorizeService.getPoliciesActionFilter(context,col,Constants.ADD); - policies.addAll(authorizeService.getPoliciesActionFilter(context, col, Constants.ADMIN)); - - for (ResourcePolicy resourcePolicy : policies) { - if (resourcePolicy.getGroup() != null) { - fieldValue = "g" + resourcePolicy.getGroup().getID(); - } else { - fieldValue = "e" + resourcePolicy.getEPerson().getID(); - } - document.addField("submit", fieldValue); - context.uncacheEntity(resourcePolicy); + // Index groups and epersons with ADD or ADMIN rights on the Collection. 
+ List prefixedIds = findDirectlyAuthorizedGroupAndEPersonPrefixedIds( + authorizeService, context, col, new int[] {Constants.ADD, Constants.ADMIN} + ); + for (String prefixedId : prefixedIds) { + document.addField("submit", prefixedId); } } catch (SQLException e) { - log.error(LogManager.getHeader(context, "Error while indexing resource policies", + log.error(LogHelper.getHeader(context, "Error while indexing resource policies", "Collection: (id " + col.getID() + " type " + col.getName() + ")" )); } } } } - -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexItemEditorsPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexItemEditorsPlugin.java new file mode 100644 index 000000000000..09308be75920 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceIndexItemEditorsPlugin.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import static org.dspace.discovery.IndexingUtils.findDirectlyAuthorizedGroupAndEPersonPrefixedIds; +import static org.dspace.discovery.IndexingUtils.findTransitiveAdminGroupIds; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.apache.logging.log4j.Logger; +import org.apache.solr.common.SolrInputDocument; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.core.LogHelper; +import org.dspace.discovery.indexobject.IndexableItem; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Indexes policies that yield write access to items. 
+ * + * @author Koen Pauwels at atmire.com + */ +public class SolrServiceIndexItemEditorsPlugin implements SolrServiceIndexPlugin { + private static final Logger log = org.apache.logging.log4j.LogManager + .getLogger(SolrServiceIndexItemEditorsPlugin.class); + + @Autowired(required = true) + protected AuthorizeService authorizeService; + + @Override + public void additionalIndex(Context context, IndexableObject idxObj, SolrInputDocument document) { + if (idxObj instanceof IndexableItem) { + Item item = ((IndexableItem) idxObj).getIndexedObject(); + if (item != null) { + try { + // Index groups with ADMIN rights on Collections containing the Item, on + // Communities containing those Collections, and recursively on any Community containing ssuch a + // Community. + // TODO: Strictly speaking we should also check for epersons who received admin rights directly, + // without being part of the admin group. Finding them may be a lot slower though. + for (Collection collection : item.getCollections()) { + for (UUID unprefixedId : findTransitiveAdminGroupIds(context, collection)) { + document.addField("edit", "g" + unprefixedId); + } + } + + // Index groups and epersons with WRITE or direct ADMIN rights on the Item. 
+ List prefixedIds = findDirectlyAuthorizedGroupAndEPersonPrefixedIds( + authorizeService, context, item, new int[] {Constants.WRITE, Constants.ADMIN} + ); + for (String prefixedId : prefixedIds) { + document.addField("edit", prefixedId); + } + } catch (SQLException e) { + log.error(LogHelper.getHeader(context, "Error while indexing resource policies", + "Item: (id " + item.getID() + " name " + item.getName() + ")" )); + } + } + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java index 081e06d1d949..746a0cb83214 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceMetadataBrowseIndexingPlugin.java @@ -7,6 +7,8 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.SolrServiceImpl.SOLR_FIELD_SUFFIX_FACET_PREFIXES; + import java.util.HashSet; import java.util.List; import java.util.Set; @@ -174,8 +176,13 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So Boolean.FALSE), true); if (!ignorePrefered) { - preferedLabel = choiceAuthorityService - .getLabel(values.get(x), collection, values.get(x).getLanguage()); + try { + preferedLabel = choiceAuthorityService + .getLabel(values.get(x), collection, values.get(x).getLanguage()); + } catch (Exception e) { + log.warn("Failed to get preferred label for " + + values.get(x).getMetadataField().toString('.'), e); + } } List variants = null; @@ -193,9 +200,13 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So Boolean.FALSE), true); if (!ignoreVariants) { - variants = choiceAuthorityService - .getVariants( - values.get(x), collection); + try { + variants = choiceAuthorityService + .getVariants(values.get(x), collection); + } catch (Exception e) { + log.warn("Failed to get variants for " 
+ + values.get(x).getMetadataField().toString(), e); + } } if (StringUtils @@ -252,9 +263,9 @@ public void additionalIndex(Context context, IndexableObject indexableObject, So } } } - for (String facet : distFValues) { document.addField(bi.getDistinctTableName() + "_filter", facet); + document.addField(bi.getDistinctTableName() + SOLR_FIELD_SUFFIX_FACET_PREFIXES, facet); } for (String facet : distFAuths) { document.addField(bi.getDistinctTableName() diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServicePrivateItemPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServicePrivateItemPlugin.java index 87302390e162..db543141e13d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServicePrivateItemPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServicePrivateItemPlugin.java @@ -16,7 +16,7 @@ import org.apache.solr.client.solrj.SolrQuery; import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.springframework.beans.factory.annotation.Autowired; /** @@ -50,7 +50,7 @@ public void additionalSearchParameters(Context context, DiscoverQuery discoveryQ } } catch (SQLException ex) { - log.error(LogManager.getHeader(context, "Error looking up authorization rights of current user", + log.error(LogHelper.getHeader(context, "Error looking up authorization rights of current user", ""), ex); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java index e5eb0d901900..d19616a85e10 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceResourceRestrictionPlugin.java @@ -28,7 +28,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.core.Constants; import 
org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.discovery.indexobject.IndexableInProgressSubmission; @@ -129,7 +129,7 @@ public void additionalIndex(Context context, IndexableObject idxObj, SolrInputDo dso = ContentServiceFactory.getInstance().getDSpaceObjectService(dso).getParentObject(context, dso); } } catch (SQLException e) { - log.error(LogManager.getHeader(context, "Error while indexing resource policies", + log.error(LogHelper.getHeader(context, "Error while indexing resource policies", "DSpace object: (id " + dso.getID() + " type " + dso.getType() + ")" )); } @@ -175,7 +175,7 @@ public void additionalSearchParameters(Context context, DiscoverQuery discoveryQ solrQuery.addFilterQuery(resourceQuery.toString()); } } catch (SQLException e) { - log.error(LogManager.getHeader(context, "Error while adding resource policy information to query", ""), e); + log.error(LogHelper.getHeader(context, "Error while adding resource policy information to query", ""), e); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSupervisionOrderIndexingPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSupervisionOrderIndexingPlugin.java new file mode 100644 index 000000000000..116b5ec88d1b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceSupervisionOrderIndexingPlugin.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery; + +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.collections4.CollectionUtils; +import 
org.apache.solr.common.SolrInputDocument; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.indexobject.IndexableInProgressSubmission; +import org.dspace.discovery.indexobject.IndexableWorkflowItem; +import org.dspace.discovery.indexobject.IndexableWorkspaceItem; +import org.dspace.supervision.SupervisionOrder; +import org.dspace.supervision.service.SupervisionOrderService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * A Solr Indexing plugin responsible adding a `supervised` field. + * When item being indexed is a workspace or workflow item, + * and at least one supervision order is defined + * the 'supervised' field with value 'true' will be added to the solr document, + * if no supervision orders are defined field will be set to 'false' + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class SolrServiceSupervisionOrderIndexingPlugin implements SolrServiceIndexPlugin { + + @Autowired(required = true) + private SupervisionOrderService supervisionOrderService; + + @Override + public void additionalIndex(Context context, IndexableObject indexableObject, SolrInputDocument document) { + try { + + if (!(indexableObject instanceof IndexableWorkspaceItem) && + !(indexableObject instanceof IndexableWorkflowItem)) { + return; + } + + Item item = + (((IndexableInProgressSubmission) indexableObject).getIndexedObject()).getItem(); + + if (Objects.isNull(item)) { + return; + } + addSupervisedField(context, item, document); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + private void addSupervisedField(Context context, Item item, SolrInputDocument document) throws SQLException { + List supervisionOrders = supervisionOrderService.findByItem(context, item); + if (CollectionUtils.isNotEmpty(supervisionOrders)) { + document.addField("supervised", true); + } else { + document.addField("supervised", false); + } + } +} diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java index fd05be1cb521..161849475651 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceWorkspaceWorkflowRestrictionPlugin.java @@ -40,6 +40,11 @@ public class SolrServiceWorkspaceWorkflowRestrictionPlugin implements SolrServic */ public static final String DISCOVER_WORKFLOW_ADMIN_CONFIGURATION_NAME = "workflowAdmin"; + /** + * The name of the discover configuration used by administrators to search for workspace and workflow tasks + */ + public static final String DISCOVER_SUPERVISION_CONFIGURATION_NAME = "supervision"; + @Autowired(required = true) protected GroupService groupService; @@ -60,18 +65,22 @@ public void additionalSearchParameters( ); boolean isWorkflowAdmin = isAdmin(context) && DISCOVER_WORKFLOW_ADMIN_CONFIGURATION_NAME.equals(discoveryQuery.getDiscoveryConfigurationName()); + + boolean isSupervision = + DISCOVER_SUPERVISION_CONFIGURATION_NAME.equals(discoveryQuery.getDiscoveryConfigurationName()); + EPerson currentUser = context.getCurrentUser(); // extra security check to avoid the possibility that an anonymous user // get access to workspace or workflow - if (currentUser == null && (isWorkflow || isWorkspace)) { + if (currentUser == null && (isWorkflow || isWorkspace || isSupervision)) { throw new IllegalStateException( "An anonymous user cannot perform a workspace or workflow search"); } if (isWorkspace) { // insert filter by submitter solrQuery.addFilterQuery("submitter_authority:(" + currentUser.getID() + ")"); - } else if (isWorkflow && !isWorkflowAdmin) { + } else if ((isWorkflow && !isWorkflowAdmin) || (isSupervision && !isAdmin(context))) { // Retrieve all the groups the current user is a member of ! 
Set groups; try { diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 636e7ccd2ae4..6cb93e2993f3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,12 +7,23 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,9 +33,18 @@ */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. 
+ */ + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); + public Map getMap() { return map; } @@ -41,25 +61,98 @@ public void setToIgnoreMetadataFields(Map> toIgnoreMetadat this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; - if (dso == null) { - name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + if (indexableObject == null) { + return getDiscoveryConfiguration(null); + } else if (indexableObject instanceof IndexableDSpaceObject) { + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); } - return getDiscoveryConfiguration(name); } - public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + /** + * Retrieve the discovery configuration for the provided DSO. 
When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. + */ + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { + // Fall back to default configuration + if (dso == null) { + return getDiscoveryConfiguration(null, true); + } + + // Attempt to retrieve cached configuration by UUID + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); + } + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); + } + + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } + + return configuration; + } + + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. 
+ */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { + return getDiscoveryConfiguration(name, true); + } + + /** + * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration + * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is + * found. + * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; result = StringUtils.isBlank(name) ? null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -67,12 +160,23 @@ public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, - final IndexableObject dso) { + /** + * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be + * checked for the provided name. 
When no match is found for the name, the configuration will be retrieved for the + * IndexableObject + * + * @param context - The database context + * @param configurationName - The name of the configuration to be retrieved + * @param indexableObject - The indexable object to retrieve the configuration for + * @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject + */ + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context, + String configurationName, + IndexableObject indexableObject) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(context, indexableObject); } } @@ -92,13 +196,25 @@ public List getIndexAlwaysConfigurations() { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { + List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName( - DiscoveryConfigurationService.class - .getName(), - DiscoveryConfigurationService.class); + DiscoveryConfigurationService.class + .getName(), + DiscoveryConfigurationService.class); for (String key : mainService.getMap().keySet()) { System.out.println(key); @@ -126,7 +242,7 @@ public static void main(String[] args) { System.out.println("Recent submissions configuration:"); DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = 
discoveryConfiguration - .getRecentSubmissionConfiguration(); + .getRecentSubmissionConfiguration(); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax()); @@ -139,4 +255,23 @@ public static void main(String[] args) { } } } + + /** + * Retrieves a list of all DiscoveryConfiguration objects where key starts with prefixConfigurationName + * + * @param prefixConfigurationName string as prefix key + */ + public List getDiscoveryConfigurationWithPrefixName(final String prefixConfigurationName) { + List discoveryConfigurationList = new ArrayList<>(); + if (StringUtils.isNotBlank(prefixConfigurationName)) { + for (String key : map.keySet()) { + if (key.equals(prefixConfigurationName) || key.startsWith(prefixConfigurationName)) { + DiscoveryConfiguration config = map.get(key); + discoveryConfigurationList.add(config); + } + } + } + return discoveryConfigurationList; + } + } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryRelatedItemConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryRelatedItemConfiguration.java new file mode 100644 index 000000000000..6c24a6bac671 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryRelatedItemConfiguration.java @@ -0,0 +1,16 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery.configuration; + +/** + * This class extends {@link DiscoveryConfiguration} and add method for set parameters + * to filter query list + * + * @author Danilo Di Nuzzo (danilo.dinuzzo at 4science.it) + */ +public class DiscoveryRelatedItemConfiguration extends DiscoveryConfiguration {} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index e251d1bc5118..cd1a4eecb8d4 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -22,6 +23,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -30,6 +36,14 @@ public void setSortFields(List sortFields) { this.sortFields = sortFields; } + public DiscoverySortFieldConfiguration getDefaultSortField() { + return defaultSortField; + } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortFunctionConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortFunctionConfiguration.java new file mode 100644 index 000000000000..7fb020cd560b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortFunctionConfiguration.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ 
+ */ +package org.dspace.discovery.configuration; + +import java.io.Serializable; +import java.text.MessageFormat; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +/** + * + * Extension of {@link DiscoverySortFieldConfiguration} used to configure sorting + * taking advantage of solr function feature. + * + * Order is evaluated by means of function parameter value and passed in arguments as input. + * + * @author Corrado Lombardi (corrado.lombardi at 4science.it) + * + */ +public class DiscoverySortFunctionConfiguration extends DiscoverySortFieldConfiguration { + + public static final String SORT_FUNCTION = "sort_function"; + private String function; + private List<String> arguments; + private String id; + + public void setFunction(final String function) { + this.function = function; + } + + public void setArguments(final List<String> arguments) { + this.arguments = arguments; + } + + @Override + public String getType() { + return SORT_FUNCTION; + } + + @Override + public String getMetadataField() { + return id; + } + + public void setId(final String id) { + this.id = id; + } + + /** + * Returns the function to be used by solr to sort result + * @param functionArgs variable arguments to be inserted in function + * @return the formatted sort function string with the given arguments substituted in + */ + public String getFunction(final Serializable...
functionArgs) { + final String args = String.join(",", Optional.ofNullable(arguments).orElse(Collections.emptyList())); + final String result = function + "(" + args + ")"; + return MessageFormat.format(result, functionArgs); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java index 0f8d9d0e9204..0fb01f2e5bde 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/AbstractIndexableObject.java @@ -14,10 +14,13 @@ /** * This class exists in order to provide a default implementation for the equals and hashCode methods. - * Since IndexableObjects can be made multiple times for the same underlying object, we needed a more finetuned - * equals and hashcode methods. We're simply checking that the underlying objects are equal and generating the hashcode - * for the underlying object. This way, we'll always get a proper result when calling equals or hashcode on an - * IndexableObject because it'll depend on the underlying object + * Since IndexableObjects can be made multiple times for the same underlying + * object, we needed more finely-tuned {@link equals} and {@link hashCode} methods. + * We're simply checking that the underlying objects are equal and returning the + * hash-code for the underlying object. This way, we'll always get a proper + * result when calling {@link equals} or {@link hashCode} on an IndexableObject + * because it'll depend on the underlying object. 
+ * * @param Refers to the underlying entity that is linked to this object * @param The type of ID that this entity uses */ diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index 5130be6cd785..817be7848df7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -16,6 +16,7 @@ import java.util.Optional; import java.util.UUID; +import org.apache.commons.lang3.StringUtils; import org.apache.solr.common.SolrInputDocument; import org.dspace.content.Collection; import org.dspace.content.Community; @@ -23,6 +24,7 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.discovery.SearchUtils; import org.dspace.discovery.configuration.DiscoveryConfiguration; @@ -84,7 +86,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde final Collection collection = indexableCollection.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -111,6 +113,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde CollectionService.MD_LICENSE, Item.ANY); String title = collectionService.getMetadataFirstValue(collection, CollectionService.MD_NAME, Item.ANY); + String entityType = 
collectionService.getMetadataFirstValue(collection, "dspace", "entity", "type", Item.ANY); List toIgnoreMetadataFields = SearchUtils.getIgnoredMetadataFields(collection.getType()); addContainerMetadataField(doc, highlightedMetadataFields, toIgnoreMetadataFields, "dc.description", @@ -126,6 +129,12 @@ public SolrInputDocument buildDocument(Context context, IndexableCollection inde addContainerMetadataField(doc, highlightedMetadataFields, toIgnoreMetadataFields, "dc.title", title); doc.addField("dc.title_sort", title); + if (StringUtils.isBlank(entityType)) { + entityType = Constants.ENTITY_TYPE_NONE; + } + addContainerMetadataField(doc, highlightedMetadataFields, toIgnoreMetadataFields, + "dspace.entity.type", entityType); + return doc; } @@ -164,4 +173,4 @@ public List getCollectionLocations(Context context, Collection collectio return locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java index 8521b7dda0de..e92819601839 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CommunityIndexFactoryImpl.java @@ -69,7 +69,7 @@ public SolrInputDocument buildDocument(Context context, IndexableCommunity index final Community community = indexableObject.getIndexedObject(); // Retrieve configuration - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community); DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration .getHitHighlightingConfiguration(); List highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public List getLocations(Context context, IndexableCommunity indexableDS return 
locations; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java index 8660bbebc796..55c99b168e7a 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java @@ -70,10 +70,20 @@ public SolrInputDocument buildDocument(Context context, T indexableObject) throw return doc; } + @Override + public SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException { + return buildDocument(context, indexableObject); + } + @Override public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument) throws SQLException, IOException, SolrServerException { - writeDocument(solrInputDocument, null); + try { + writeDocument(solrInputDocument, null); + } catch (Exception e) { + log.error("Error occurred while writing SOLR document for {} object {}", + indexableObject.getType(), indexableObject.getID(), e); + } } /** @@ -95,7 +105,6 @@ protected void writeDocument(SolrInputDocument doc, FullTextContentStreams strea 100000); // Use Tika's Text parser as the streams are always from the TEXT bundle (i.e. already extracted text) - // TODO: We may wish to consider using Tika to extract the text in the future. TextAndCSVParser tikaParser = new TextAndCSVParser(); BodyContentHandler tikaHandler = new BodyContentHandler(charLimit); Metadata tikaMetadata = new Metadata(); @@ -114,9 +123,11 @@ protected void writeDocument(SolrInputDocument doc, FullTextContentStreams strea log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)." + " Only the first {} characters were indexed.", charLimit); } else { + log.error("Tika parsing error. Could not index full text.", saxe); throw new IOException("Tika parsing error. 
Could not index full text.", saxe); } } catch (TikaException ex) { + log.error("Tika parsing error. Could not index full text.", ex); throw new IOException("Tika parsing error. Could not index full text.", ex); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index cd637cb1179b..f24e9875f006 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -14,13 +14,18 @@ import org.apache.solr.common.SolrInputDocument; import org.dspace.content.InProgressSubmission; import org.dspace.content.Item; +import org.dspace.content.WorkspaceItem; import org.dspace.core.Context; import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.indexobject.factory.CollectionIndexFactory; import org.dspace.discovery.indexobject.factory.InprogressSubmissionIndexFactory; import org.dspace.discovery.indexobject.factory.ItemIndexFactory; import org.dspace.eperson.EPerson; +import org.dspace.supervision.SupervisionOrder; +import org.dspace.supervision.service.SupervisionOrderService; import org.dspace.util.SolrUtils; +import org.dspace.workflow.WorkflowItem; import org.springframework.beans.factory.annotation.Autowired; /** @@ -36,6 +41,9 @@ public abstract class InprogressSubmissionIndexFactoryImpl @Autowired protected ItemIndexFactory indexableItemService; + @Autowired + protected SupervisionOrderService supervisionOrderService; + @Override public SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException { @@ -57,6 +65,8 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, submitter.getFullName()); } + 
addSupervisedByFacetIndex(context, item, doc); + doc.addField("inprogress.item", new IndexableItem(inProgressSubmission.getItem()).getUniqueIndexID()); // get the location string (for searching by collection & community) @@ -68,7 +78,26 @@ public void storeInprogressItemFields(Context context, SolrInputDocument doc, locations.add("l" + inProgressSubmission.getCollection().getID()); // Add item metadata - indexableItemService.addDiscoveryFields(doc, context, item, SearchUtils.getAllDiscoveryConfigurations(item)); + List discoveryConfigurations; + if (inProgressSubmission instanceof WorkflowItem) { + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); + } else if (inProgressSubmission instanceof WorkspaceItem) { + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkspaceItem) inProgressSubmission); + } else { + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); + } + indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); } + + private void addSupervisedByFacetIndex(Context context, Item item, SolrInputDocument doc) throws SQLException { + List supervisionOrders = supervisionOrderService.findByItem(context, item); + for (SupervisionOrder supervisionOrder : supervisionOrders) { + addFacetIndex(doc, "supervisedBy", supervisionOrder.getGroup().getID().toString(), + supervisionOrder.getGroup().getName()); + } + + } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 07948bb0c30d..7cdb8b93d80e 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -7,10 +7,11 @@ */ 
package org.dspace.discovery.indexobject; +import static org.dspace.discovery.SolrServiceImpl.SOLR_FIELD_SUFFIX_FACET_PREFIXES; + import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -21,6 +22,8 @@ import java.util.Optional; import java.util.Set; import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; @@ -28,6 +31,7 @@ import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; +import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; @@ -41,9 +45,8 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.discovery.FullTextContentStreams; -import org.dspace.discovery.IndexableObject; import org.dspace.discovery.SearchUtils; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; @@ -64,6 +67,9 @@ import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.util.MultiFormatDateParser; import org.dspace.util.SolrUtils; +import org.dspace.versioning.Version; +import org.dspace.versioning.VersionHistory; +import org.dspace.versioning.service.VersionHistoryService; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService; import org.springframework.beans.factory.annotation.Autowired; @@ -78,6 +84,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl 
findAll(Context context) throws SQLException { - Iterator items = itemService.findAllUnfiltered(context); + Iterator items = itemService.findAllRegularItems(context); return new Iterator() { @Override public boolean hasNext() { @@ -139,6 +151,7 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI doc.addField("withdrawn", item.isWithdrawn()); doc.addField("discoverable", item.isDiscoverable()); doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified())); + doc.addField("latestVersion", isLatestVersion(context, item)); EPerson submitter = item.getSubmitter(); if (submitter != null) { @@ -147,7 +160,7 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI } // Add the item metadata - List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace @@ -159,13 +172,51 @@ public SolrInputDocument buildDocument(Context context, IndexableItem indexableI addNamedResourceTypeIndex(doc, acvalue); } - // write the index and close the inputstreamreaders - try { - log.info("Wrote Item: " + item.getID() + " to Index"); - } catch (RuntimeException e) { - log.error("Error while writing item to discovery index: " + item.getID() + " message:" - + e.getMessage(), e); + return doc; + } + + /** + * Check whether the given item is the latest version. + * If the latest item cannot be determined, because either the version history or the latest version is not present, + * assume the item is latest. + * @param context the DSpace context. + * @param item the item that should be checked. + * @return true if the item is the latest version, false otherwise. 
+ */ + protected boolean isLatestVersion(Context context, Item item) throws SQLException { + VersionHistory history = versionHistoryService.findByItem(context, item); + if (history == null) { + // not all items have a version history + // if an item does not have a version history, it is by definition the latest version + return true; + } + + // start with the very latest version of the given item (may still be in workspace) + Version latestVersion = versionHistoryService.getLatestVersion(context, history); + + // find the latest version of the given item that is archived + while (latestVersion != null && !latestVersion.getItem().isArchived()) { + latestVersion = versionHistoryService.getPrevious(context, history, latestVersion); } + + // could not find an archived version of the given item + if (latestVersion == null) { + // this scenario should never happen, but let's err on the side of showing too many items vs. too little + // (see discovery.xml, a lot of discovery configs filter out all items that are not the latest version) + return true; + } + + // sanity check + assert latestVersion.getItem().isArchived(); + + return item.equals(latestVersion.getItem()); + } + + @Override + public SolrInputDocument buildNewDocument(Context context, IndexableItem indexableItem) + throws SQLException, IOException { + SolrInputDocument doc = buildDocument(context, indexableItem); + doc.addField(STATUS_FIELD, STATUS_FIELD_PREDB); return doc; } @@ -359,10 +410,13 @@ public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item "discovery.index.authority.ignore-prefered", Boolean.FALSE), true); - if (!ignorePrefered) { - preferedLabel = choiceAuthorityService - .getLabel(meta, collection, meta.getLanguage()); + if (!ignorePrefered && !authority.startsWith(AuthorityValueService.GENERATE)) { + try { + preferedLabel = choiceAuthorityService.getLabel(meta, collection, meta.getLanguage()); + } catch (Exception e) { + log.warn("Failed to get preferred label for " +
field, e); + } } boolean ignoreVariants = @@ -377,8 +431,12 @@ public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item Boolean.FALSE), true); if (!ignoreVariants) { - variants = choiceAuthorityService + try { + variants = choiceAuthorityService .getVariants(meta, collection); + } catch (Exception e) { + log.warn("Failed to get variants for " + field, e); + } } } @@ -461,88 +519,10 @@ public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item + var); } } - + // if searchFilter is of type "facet", delegate to indexIfFilterTypeFacet method if (searchFilter.getFilterType().equals(DiscoverySearchFilterFacet.FILTER_TYPE_FACET)) { - if (searchFilter.getType().equals(DiscoveryConfigurationParameters.TYPE_TEXT)) { - //Add a special filter - //We use a separator to split up the lowercase and regular case, this is needed to - // get our filters in regular case - //Solr has issues with facet prefix and cases - if (authority != null) { - String facetValue = preferedLabel != null ? 
preferedLabel : value; - doc.addField(searchFilter.getIndexFieldName() + "_filter", facetValue - .toLowerCase() + separator + facetValue + SearchUtils.AUTHORITY_SEPARATOR - + authority); - } else { - doc.addField(searchFilter.getIndexFieldName() + "_filter", - value.toLowerCase() + separator + value); - } - } else if (searchFilter.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { - if (date != null) { - String indexField = searchFilter.getIndexFieldName() + ".year"; - String yearUTC = DateFormatUtils.formatUTC(date, "yyyy"); - doc.addField(searchFilter.getIndexFieldName() + "_keyword", yearUTC); - // add the year to the autocomplete index - doc.addField(searchFilter.getIndexFieldName() + "_ac", yearUTC); - doc.addField(indexField, yearUTC); - - if (yearUTC.startsWith("0")) { - doc.addField( - searchFilter.getIndexFieldName() - + "_keyword", - yearUTC.replaceFirst("0*", "")); - // add date without starting zeros for autocomplete e filtering - doc.addField( - searchFilter.getIndexFieldName() - + "_ac", - yearUTC.replaceFirst("0*", "")); - doc.addField( - searchFilter.getIndexFieldName() - + "_ac", - value.replaceFirst("0*", "")); - doc.addField( - searchFilter.getIndexFieldName() - + "_keyword", - value.replaceFirst("0*", "")); - } - - //Also save a sort value of this year, this is required for determining the upper - // & lower bound year of our facet - if (doc.getField(indexField + "_sort") == null) { - //We can only add one year so take the first one - doc.addField(indexField + "_sort", yearUTC); - } - } - } else if (searchFilter.getType() - .equals(DiscoveryConfigurationParameters.TYPE_HIERARCHICAL)) { - HierarchicalSidebarFacetConfiguration hierarchicalSidebarFacetConfiguration = - (HierarchicalSidebarFacetConfiguration) searchFilter; - String[] subValues = value.split(hierarchicalSidebarFacetConfiguration.getSplitter()); - if (hierarchicalSidebarFacetConfiguration - .isSkipFirstNodeLevel() && 1 < subValues.length) { - //Remove the first element of 
our array - subValues = (String[]) ArrayUtils.subarray(subValues, 1, subValues.length); - } - for (int i = 0; i < subValues.length; i++) { - StringBuilder valueBuilder = new StringBuilder(); - for (int j = 0; j <= i; j++) { - valueBuilder.append(subValues[j]); - if (j < i) { - valueBuilder.append(hierarchicalSidebarFacetConfiguration.getSplitter()); - } - } - - String indexValue = valueBuilder.toString().trim(); - doc.addField(searchFilter.getIndexFieldName() + "_tax_" + i + "_filter", - indexValue.toLowerCase() + separator + indexValue); - //We add the field x times that it has occurred - for (int j = i; j < subValues.length; j++) { - doc.addField(searchFilter.getIndexFieldName() + "_filter", - indexValue.toLowerCase() + separator + indexValue); - doc.addField(searchFilter.getIndexFieldName() + "_keyword", indexValue); - } - } - } + indexIfFilterTypeFacet(doc, searchFilter, value, date, + authority, preferedLabel, separator); } } } @@ -618,7 +598,7 @@ public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item } } catch (Exception e) { - log.error(LogManager.getHeader(context, "item_metadata_discovery_error", + log.error(LogHelper.getHeader(context, "item_metadata_discovery_error", "Item identifier: " + item.getID()), e); } @@ -641,7 +621,7 @@ public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item } } catch (Exception e) { - log.error(LogManager.getHeader(context, "item_publication_group_discovery_error", + log.error(LogHelper.getHeader(context, "item_publication_group_discovery_error", "Item identifier: " + item.getID()), e); } @@ -706,26 +686,31 @@ public boolean supports(Object object) { } @Override - public List getIndexableObjects(Context context, Item object) throws SQLException { - List results = new ArrayList<>(); - if (object.isArchived() || object.isWithdrawn()) { - // We only want to index an item as an item if it is not in workflow - results.addAll(Arrays.asList(new IndexableItem(object))); - } else { - // 
Check if we have a workflow / workspace item - final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, object); - if (workspaceItem != null) { - results.addAll(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem)); - } else { - // Check if we a workflow item - final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, object); - if (xmlWorkflowItem != null) { - results.addAll(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem)); - } - } + public List getIndexableObjects(Context context, Item item) throws SQLException { + if (item.isArchived() || item.isWithdrawn()) { + // we only want to index an item as an item if it is not in workflow + return List.of(new IndexableItem(item)); + } + + final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, item); + if (workspaceItem != null) { + // a workspace item is linked to the given item + return List.copyOf(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem)); + } + + final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, item); + if (xmlWorkflowItem != null) { + // a workflow item is linked to the given item + return List.copyOf(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem)); } - return results; + if (!isLatestVersion(context, item)) { + // the given item is an older version of another item + return List.of(new IndexableItem(item)); + } + + // nothing to index + return List.of(); } @Override @@ -733,4 +718,140 @@ public Optional findIndexableObject(Context context, String id) t final Item item = itemService.find(context, UUID.fromString(id)); return item == null ? Optional.empty() : Optional.of(new IndexableItem(item)); } + + /** + * Handles indexing when discoverySearchFilter is of type facet. 
+ * + * @param doc the solr document + * @param searchFilter the discoverySearchFilter + * @param value the metadata value + * @param date Date object + * @param authority the authority key + * @param preferedLabel the preferred label for metadata field + * @param separator the separator being used to separate lowercase and regular case + */ + private void indexIfFilterTypeFacet(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value, + Date date, String authority, String preferedLabel, String separator) { + if (searchFilter.getType().equals(DiscoveryConfigurationParameters.TYPE_TEXT)) { + //Add a special filter + //We use a separator to split up the lowercase and regular case, this is needed to + // get our filters in regular case + //Solr has issues with facet prefix and cases + if (authority != null) { + String facetValue = preferedLabel != null ? preferedLabel : value; + doc.addField(searchFilter.getIndexFieldName() + "_filter", facetValue + .toLowerCase() + separator + facetValue + SearchUtils.AUTHORITY_SEPARATOR + + authority); + } else { + doc.addField(searchFilter.getIndexFieldName() + "_filter", + value.toLowerCase() + separator + value); + } + //Also add prefix field with all parts of value + saveFacetPrefixParts(doc, searchFilter, value, separator, authority, preferedLabel); + } else if (searchFilter.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { + if (date != null) { + String indexField = searchFilter.getIndexFieldName() + ".year"; + String yearUTC = DateFormatUtils.formatUTC(date, "yyyy"); + doc.addField(searchFilter.getIndexFieldName() + "_keyword", yearUTC); + // add the year to the autocomplete index + doc.addField(searchFilter.getIndexFieldName() + "_ac", yearUTC); + doc.addField(indexField, yearUTC); + + if (yearUTC.startsWith("0")) { + doc.addField( + searchFilter.getIndexFieldName() + + "_keyword", + yearUTC.replaceFirst("0*", "")); + // add date without starting zeros for autocomplete e filtering + doc.addField( 
+ searchFilter.getIndexFieldName() + + "_ac", + yearUTC.replaceFirst("0*", "")); + doc.addField( + searchFilter.getIndexFieldName() + + "_ac", + value.replaceFirst("0*", "")); + doc.addField( + searchFilter.getIndexFieldName() + + "_keyword", + value.replaceFirst("0*", "")); + } + + //Also save a sort value of this year, this is required for determining the upper + // & lower bound year of our facet + if (doc.getField(indexField + "_sort") == null) { + //We can only add one year so take the first one + doc.addField(indexField + "_sort", yearUTC); + } + } + } else if (searchFilter.getType() + .equals(DiscoveryConfigurationParameters.TYPE_HIERARCHICAL)) { + HierarchicalSidebarFacetConfiguration hierarchicalSidebarFacetConfiguration = + (HierarchicalSidebarFacetConfiguration) searchFilter; + String[] subValues = value.split(hierarchicalSidebarFacetConfiguration.getSplitter()); + if (hierarchicalSidebarFacetConfiguration + .isSkipFirstNodeLevel() && 1 < subValues.length) { + //Remove the first element of our array + subValues = (String[]) ArrayUtils.subarray(subValues, 1, subValues.length); + } + for (int i = 0; i < subValues.length; i++) { + StringBuilder valueBuilder = new StringBuilder(); + for (int j = 0; j <= i; j++) { + valueBuilder.append(subValues[j]); + if (j < i) { + valueBuilder.append(hierarchicalSidebarFacetConfiguration.getSplitter()); + } + } + + String indexValue = valueBuilder.toString().trim(); + doc.addField(searchFilter.getIndexFieldName() + "_tax_" + i + "_filter", + indexValue.toLowerCase() + separator + indexValue); + //We add the field x times that it has occurred + for (int j = i; j < subValues.length; j++) { + doc.addField(searchFilter.getIndexFieldName() + "_filter", + indexValue.toLowerCase() + separator + indexValue); + doc.addField(searchFilter.getIndexFieldName() + "_keyword", indexValue); + } + } + //Also add prefix field with all parts of value + saveFacetPrefixParts(doc, searchFilter, value, separator, authority, preferedLabel); + } + 
} + + /** + * Stores every "value part" in lowercase, together with the original value in regular case, + * separated by the separator, in the {fieldName}{@link SolrServiceImpl.SOLR_FIELD_SUFFIX_FACET_PREFIXES} field. + *
    + * E.g. Author "With Multiple Words" gets stored as: + *
    + * + * with multiple words ||| With Multiple Words,
    + * multiple words ||| With Multiple Words,
    + * words ||| With Multiple Words,
    + *
    + * in the author_prefix field. + * @param doc the solr document + * @param searchFilter the current discoverySearchFilter + * @param value the metadata value + * @param separator the separator being used to separate value part and original value + */ + private void saveFacetPrefixParts(SolrInputDocument doc, DiscoverySearchFilter searchFilter, String value, + String separator, String authority, String preferedLabel) { + value = StringUtils.normalizeSpace(value); + Pattern pattern = Pattern.compile("\\b\\w+\\b", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CHARACTER_CLASS); + Matcher matcher = pattern.matcher(value); + while (matcher.find()) { + int index = matcher.start(); + String currentPart = StringUtils.substring(value, index); + if (authority != null) { + String facetValue = preferedLabel != null ? preferedLabel : currentPart; + doc.addField(searchFilter.getIndexFieldName() + SOLR_FIELD_SUFFIX_FACET_PREFIXES, + facetValue.toLowerCase() + separator + value + + SearchUtils.AUTHORITY_SEPARATOR + authority); + } else { + doc.addField(searchFilter.getIndexFieldName() + SOLR_FIELD_SUFFIX_FACET_PREFIXES, + currentPart.toLowerCase() + separator + value); + } + } + } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java index 518a8ff14561..bef44326fe75 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java @@ -64,6 +64,7 @@ public SolrInputDocument buildDocument(Context context, IndexableMetadataField i Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); // add read permission on doc for anonymous group doc.addField("read", "g" + anonymousGroup.getID()); + doc.addField(FIELD_NAME_VARIATIONS + "_sort", fieldName); return doc; } diff --git 
a/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java index 6644da248d80..7946311796a4 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/IndexFactory.java @@ -46,6 +46,14 @@ public interface IndexFactory { */ SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException; + /** + * Create solr document with all the shared fields initialized. + * Can contain special fields required for "new" documents vs regular buildDocument + * @param indexableObject the indexableObject that we want to index + * @return initialized solr document + */ + SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException; + /** * Write the provided document to the solr core * @param context DSpace context object diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java new file mode 100644 index 000000000000..92a973dff883 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java @@ -0,0 +1,428 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.discovery.utils; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.collections4.CollectionUtils; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; +import org.dspace.core.LogHelper; +import org.dspace.discovery.DiscoverFacetField; +import org.dspace.discovery.DiscoverFilterQuery; +import org.dspace.discovery.DiscoverHitHighlightingField; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.FacetYearRange; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; +import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration; +import org.dspace.discovery.configuration.DiscoverySearchFilter; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; +import org.dspace.discovery.configuration.DiscoverySortConfiguration; +import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; +import org.dspace.discovery.indexobject.factory.IndexFactory; +import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; + +public class DiscoverQueryBuilder implements InitializingBean { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class); + + @Autowired + private SearchService searchService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private List indexableFactories; + + private int pageSizeLimit; + + @Override + public void afterPropertiesSet() throws Exception { + pageSizeLimit = configurationService.getIntProperty("rest.search.max.results", 100); + } + + /** + * Build a discovery query + * + * @param context the DSpace context + * @param scope the 
scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoType only include search results with this type + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param sortProperty the sort property for this discovery query + * @param sortDirection the sort direction for this discovery query + */ + public DiscoverQuery buildQuery(Context context, IndexableObject scope, + DiscoveryConfiguration discoveryConfiguration, + String query, List searchFilters, + String dsoType, Integer pageSize, Long offset, String sortProperty, + String sortDirection) throws SearchServiceException { + + List dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); + + return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, pageSize, offset, + sortProperty, sortDirection); + } + + + /** + * Build a discovery query + * + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoTypes only include search results with one of these types + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param sortProperty the sort property for this discovery query + * @param sortDirection the sort direction for this discovery query + */ + public DiscoverQuery buildQuery(Context context, IndexableObject scope, + DiscoveryConfiguration discoveryConfiguration, + String query, List searchFilters, + List dsoTypes, Integer pageSize, Long offset, String sortProperty, + String sortDirection) + 
throws IllegalArgumentException, SearchServiceException { + + DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, + dsoTypes); + + //When all search criteria are set, configure facet results + addFaceting(context, scope, queryArgs, discoveryConfiguration); + + //Configure pagination and sorting + configurePagination(pageSize, offset, queryArgs); + configureSorting(sortProperty, sortDirection, queryArgs, discoveryConfiguration.getSearchSortConfiguration()); + + addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs); + return queryArgs; + } + + private void addDiscoveryHitHighlightFields(DiscoveryConfiguration discoveryConfiguration, + DiscoverQuery queryArgs) { + if (discoveryConfiguration.getHitHighlightingConfiguration() != null) { + List metadataFields = discoveryConfiguration + .getHitHighlightingConfiguration().getMetadataFields(); + for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) { + queryArgs.addHitHighlightingField( + new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(), + fieldConfiguration.getSnippets())); + } + } + } + + /** + * Create a discovery facet query. + * + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param prefix limit the facets results to those starting with the given prefix. 
+ * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoType only include search results with this type + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param facetName the facet field + */ + public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, + DiscoveryConfiguration discoveryConfiguration, + String prefix, String query, List searchFilters, + String dsoType, Integer pageSize, Long offset, String facetName) + throws IllegalArgumentException { + + List dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); + + return buildFacetQuery( + context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, pageSize, offset, + facetName); + } + + /** + * Create a discovery facet query. + * + * @param context the DSpace context + * @param scope the scope for this discovery query + * @param discoveryConfiguration the discovery configuration for this discovery query + * @param prefix limit the facets results to those starting with the given prefix. 
+ * @param query the query string for this discovery query + * @param searchFilters the search filters for this discovery query + * @param dsoTypes only include search results with one of these types + * @param pageSize the page size for this discovery query + * @param offset the offset for this discovery query + * @param facetName the facet field + */ + public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, + DiscoveryConfiguration discoveryConfiguration, + String prefix, String query, List searchFilters, + List dsoTypes, Integer pageSize, Long offset, String facetName) + throws IllegalArgumentException { + + DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, + dsoTypes); + + //When all search criteria are set, configure facet results + addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, pageSize); + + //We don' want any search results, we only want facet values + queryArgs.setMaxResults(0); + + //Configure pagination + configurePaginationForFacets(offset, queryArgs); + + return queryArgs; + } + + private void configurePaginationForFacets(Long offset, DiscoverQuery queryArgs) { + if (offset != null) { + queryArgs.setFacetOffset(Math.toIntExact(offset)); + } + } + + private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix, + DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, + String facetName, Integer pageSize) + throws IllegalArgumentException { + + DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName); + if (facet != null) { + queryArgs.setFacetMinCount(1); + + pageSize = pageSize != null ? 
Math.min(pageSizeLimit, pageSize) : pageSizeLimit; + + fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize); + + } else { + throw new IllegalArgumentException(facetName + " is not a valid search facet"); + } + + return queryArgs; + } + + private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix, + DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) { + if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { + try { + FacetYearRange facetYearRange = + searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs); + + queryArgs.addYearRangeFacet(facet, facetYearRange); + + } catch (Exception e) { + log.error(LogHelper.getHeader(context, "Error in Discovery while setting up date facet range", + "date facet: " + facet), e); + } + + } else { + + //Add one to our facet limit to make sure that if we have more then the shown facets that we show our + // "show more" url + int facetLimit = pageSize + 1; + //This should take care of the sorting for us + prefix = StringUtils.isNotBlank(prefix) ? 
prefix.toLowerCase() : null; + queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit, + facet.getSortOrderSidebar(), + StringUtils.trimToNull(prefix))); + } + } + + private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration, + String query, + List searchFilters, List dsoTypes) + throws IllegalArgumentException { + DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration); + + queryArgs.addFilterQueries(convertFiltersToString(context, discoveryConfiguration, searchFilters)); + + //Set search query + if (StringUtils.isNotBlank(query)) { + queryArgs.setQuery(query); + } + + //Limit results to DSO types + if (isNotEmpty(dsoTypes)) { + dsoTypes.stream() + .map(this::getDsoType) + .forEach(queryArgs::addDSpaceObjectFilter); + } + + return queryArgs; + } + + private DiscoverQuery buildBaseQueryForConfiguration(DiscoveryConfiguration discoveryConfiguration) { + DiscoverQuery queryArgs = new DiscoverQuery(); + queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); + queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() + .toArray( + new String[discoveryConfiguration + .getDefaultFilterQueries() + .size()])); + return queryArgs; + } + + private void configureSorting(String sortProperty, String sortDirection, DiscoverQuery queryArgs, + DiscoverySortConfiguration searchSortConfiguration) + throws IllegalArgumentException, SearchServiceException { + String sortBy = sortProperty; + String sortOrder = sortDirection; + + //Load defaults if we did not receive values + if (sortBy == null) { + sortBy = getDefaultSortField(searchSortConfiguration); + } + if (sortOrder == null) { + sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder); + } + + if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) { + throw new SearchServiceException( + "The field: " + sortBy + "is not 
configured for the configuration!"); + } + + + //Update Discovery query + DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration + .getSortFieldConfiguration(sortBy); + + if (sortFieldConfiguration != null) { + String sortField = searchService + .toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType()); + + if ("asc".equalsIgnoreCase(sortOrder)) { + queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc); + } else if ("desc".equalsIgnoreCase(sortOrder)) { + queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc); + } else { + throw new IllegalArgumentException(sortOrder + " is not a valid sort order"); + } + + } else { + throw new IllegalArgumentException(sortBy + " is not a valid sort field"); + } + } + + private boolean isConfigured(String sortBy, DiscoverySortConfiguration searchSortConfiguration) { + return Objects.nonNull(searchSortConfiguration.getSortFieldConfiguration(sortBy)); + } + + private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { + if (searchSortConfiguration.getDefaultSortField() != null) { + sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + !searchSortConfiguration.getSortFields().isEmpty()) { + sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); + } + return sortOrder; + } + + private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) { + String sortBy;// Attempt to find the default one, if none found we use SCORE + sortBy = "score"; + if (searchSortConfiguration.getDefaultSortField() != null) { + sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + !searchSortConfiguration.getSortFields().isEmpty()) { + DiscoverySortFieldConfiguration 
defaultSort = searchSortConfiguration.getSortFields().get(0); + if (StringUtils.isBlank(defaultSort.getMetadataField())) { + return sortBy; + } + sortBy = defaultSort.getMetadataField(); + } + return sortBy; + } + + private void configurePagination(Integer size, Long offset, DiscoverQuery queryArgs) { + queryArgs.setMaxResults(size != null ? Math.min(pageSizeLimit, size) : pageSizeLimit); + queryArgs.setStart(offset != null ? Math.toIntExact(offset) : 0); + } + + private String getDsoType(String dsoType) throws IllegalArgumentException { + for (IndexFactory indexFactory : indexableFactories) { + if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) { + return indexFactory.getType(); + } + } + throw new IllegalArgumentException(dsoType + " is not a valid DSpace Object type"); + } + + public void setIndexableFactories(List indexableFactories) { + this.indexableFactories = indexableFactories; + } + + private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs, + DiscoveryConfiguration discoveryConfiguration) { + + List facets = discoveryConfiguration.getSidebarFacets(); + + log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? 
facets + .size() : null)); + + if (facets != null) { + queryArgs.setFacetMinCount(1); + + /** enable faceting of search results */ + for (DiscoverySearchFilterFacet facet : facets) { + fillFacetIntoQueryArgs(context, scope, null, queryArgs, facet, facet.getFacetLimit()); + } + } + + return queryArgs; + } + + private String[] convertFiltersToString(Context context, DiscoveryConfiguration discoveryConfiguration, + List searchFilters) + throws IllegalArgumentException { + ArrayList filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters)); + + try { + for (QueryBuilderSearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) { + DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName()); + if (filter == null) { + throw new IllegalArgumentException(searchFilter.getName() + " is not a valid search filter"); + } + + DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context, + filter.getIndexFieldName(), + searchFilter.getOperator(), + searchFilter.getValue(), + discoveryConfiguration); + + if (filterQuery != null) { + filterQueries.add(filterQuery.getFilterQuery()); + } + } + } catch (SQLException e) { + throw new IllegalArgumentException("There was a problem parsing the search filters.", e); + } + + return filterQueries.toArray(new String[filterQueries.size()]); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/parameter/QueryBuilderSearchFilter.java b/dspace-api/src/main/java/org/dspace/discovery/utils/parameter/QueryBuilderSearchFilter.java new file mode 100644 index 000000000000..f1d16070de38 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/parameter/QueryBuilderSearchFilter.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.discovery.utils.parameter; + +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; + +/** + * Representation for a Discovery search filter + */ +public class QueryBuilderSearchFilter { + + private String name; + private String operator; + private String value; + + public QueryBuilderSearchFilter(final String name, final String operator, final String value) { + this.name = name; + this.operator = operator; + this.value = value; + } + + public String getName() { + return name; + } + + public String getOperator() { + return operator; + } + + public String getValue() { + return value; + } + + public String toString() { + return "QueryBuilderSearchFilter{" + + "name='" + name + '\'' + + ", operator='" + operator + '\'' + + ", value='" + value + '\'' + + '}'; + } + + public boolean equals(Object object) { + if (object instanceof QueryBuilderSearchFilter) { + QueryBuilderSearchFilter obj = (QueryBuilderSearchFilter) object; + + if (!StringUtils.equals(obj.getName(), getName())) { + return false; + } + if (!StringUtils.equals(obj.getOperator(), getOperator())) { + return false; + } + if (!StringUtils.equals(obj.getValue(), getValue())) { + return false; + } + return true; + } + return false; + } + + public int hashCode() { + return Objects.hash(name, operator, value); + } +} diff --git a/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java b/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java index 4f9bdc2bbc6f..c20961db7544 100644 --- a/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/disseminate/CitationDocumentServiceImpl.java @@ -8,7 +8,6 @@ package org.dspace.disseminate; import java.awt.Color; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; @@ -297,13 +296,18 @@ public boolean canGenerateCitationVersion(Context context, Bitstream 
bitstream) } @Override - public Pair makeCitedDocument(Context context, Bitstream bitstream) + public Pair makeCitedDocument(Context context, Bitstream bitstream) throws IOException, SQLException, AuthorizeException { PDDocument document = new PDDocument(); PDDocument sourceDocument = new PDDocument(); try { Item item = (Item) bitstreamService.getParentObject(context, bitstream); - sourceDocument = sourceDocument.load(bitstreamService.retrieve(context, bitstream)); + final InputStream inputStream = bitstreamService.retrieve(context, bitstream); + try { + sourceDocument = sourceDocument.load(inputStream); + } finally { + inputStream.close(); + } PDPage coverPage = new PDPage(citationPageFormat); generateCoverPage(context, document, coverPage, item); addCoverPageToDocument(document, sourceDocument, coverPage); @@ -313,7 +317,7 @@ public Pair makeCitedDocument(Context context, Bitstream bits document.save(out); byte[] data = out.toByteArray(); - return Pair.of((InputStream) new ByteArrayInputStream(data), Long.valueOf(data.length)); + return Pair.of(data, Long.valueOf(data.length)); } } finally { diff --git a/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java b/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java index 4a59de3f5fe1..0566fc525c06 100644 --- a/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java +++ b/dspace-api/src/main/java/org/dspace/disseminate/service/CitationDocumentService.java @@ -8,7 +8,6 @@ package org.dspace.disseminate.service; import java.io.IOException; -import java.io.InputStream; import java.sql.SQLException; import org.apache.commons.lang3.tuple.Pair; @@ -84,7 +83,7 @@ public interface CitationDocumentService { * @throws SQLException if database error * @throws AuthorizeException if authorization error */ - public Pair makeCitedDocument(Context context, Bitstream bitstream) + public Pair makeCitedDocument(Context context, Bitstream 
bitstream) throws IOException, SQLException, AuthorizeException; /** diff --git a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java index 25c61f511a79..3d4eab125f92 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/AccountServiceImpl.java @@ -12,9 +12,9 @@ import java.util.Locale; import javax.mail.MessagingException; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.authenticate.service.AuthenticationService; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; import org.dspace.core.Email; @@ -52,6 +52,9 @@ public class AccountServiceImpl implements AccountService { @Autowired private ConfigurationService configurationService; + @Autowired + private AuthenticationService authenticationService; + protected AccountServiceImpl() { } @@ -80,6 +83,9 @@ public void sendRegistrationInfo(Context context, String email) if (!configurationService.getBooleanProperty("user.registration", true)) { throw new IllegalStateException("The user.registration parameter was set to false"); } + if (!authenticationService.canSelfRegister(context, null, email)) { + throw new IllegalStateException("self registration is not allowed with this email address"); + } sendInfo(context, email, true, true); } @@ -178,14 +184,6 @@ public void deleteToken(Context context, String token) registrationDataService.deleteByToken(context, token); } - @Override - public boolean verifyPasswordStructure(String password) { - if (StringUtils.length(password) < 6) { - return false; - } - return true; - } - /** * THIS IS AN INTERNAL METHOD. THE SEND PARAMETER ALLOWS IT TO BE USED FOR * TESTING PURPOSES. 
diff --git a/dspace-api/src/main/java/org/dspace/eperson/CaptchaServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/CaptchaServiceImpl.java new file mode 100644 index 000000000000..0ab66aea5c2e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/CaptchaServiceImpl.java @@ -0,0 +1,125 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.regex.Pattern; +import javax.annotation.PostConstruct; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.http.HttpResponse; +import org.apache.http.NameValuePair; +import org.apache.http.client.HttpClient; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.eperson.service.CaptchaService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.StringUtils; + +/** + * Basic services implementation for the Captcha. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class CaptchaServiceImpl implements CaptchaService { + + private static final Logger log = LogManager.getLogger(CaptchaServiceImpl.class); + + private static Pattern RESPONSE_PATTERN = Pattern.compile("[A-Za-z0-9_-]+"); + + private CaptchaSettings captchaSettings; + + @Autowired + private ConfigurationService configurationService; + + @PostConstruct + public void init() { + captchaSettings = new CaptchaSettings(); + captchaSettings.setSite(configurationService.getProperty("google.recaptcha.key.site")); + captchaSettings.setSecret(configurationService.getProperty("google.recaptcha.key.secret")); + captchaSettings.setSiteVerify(configurationService.getProperty("google.recaptcha.site-verify")); + captchaSettings.setCaptchaVersion(configurationService.getProperty("google.recaptcha.version", "v2")); + captchaSettings.setThreshold(Float.parseFloat( + configurationService.getProperty("google.recaptcha.key.threshold", "0.5"))); + } + + @Override + public void processResponse(String response, String action) throws InvalidReCaptchaException { + + if (!responseSanityCheck(response)) { + throw new InvalidReCaptchaException("Response contains invalid characters"); + } + + URI verifyUri = URI.create(captchaSettings.getSiteVerify()); + + List params = new ArrayList(3); + params.add(new BasicNameValuePair("secret", captchaSettings.getSecret())); + params.add(new BasicNameValuePair("response", response)); + params.add(new BasicNameValuePair("remoteip", "")); + + HttpPost httpPost = new HttpPost(verifyUri); + try { + httpPost.addHeader("Accept", "application/json"); + httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded"); + httpPost.setEntity(new UrlEncodedFormEntity(params, "UTF-8")); + } catch (UnsupportedEncodingException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + HttpClient httpClient = HttpClientBuilder.create().build(); + 
HttpResponse httpResponse; + GoogleCaptchaResponse googleResponse; + final ObjectMapper objectMapper = new ObjectMapper(); + try { + httpResponse = httpClient.execute(httpPost); + googleResponse = objectMapper.readValue(httpResponse.getEntity().getContent(), GoogleCaptchaResponse.class); + } catch (IOException e) { + log.error(e.getMessage(), e); + throw new RuntimeException("Error during verify google recaptcha site", e); + } + validateGoogleResponse(googleResponse, action); + } + + private boolean responseSanityCheck(String response) { + return StringUtils.hasLength(response) && RESPONSE_PATTERN.matcher(response).matches(); + } + + private void validateGoogleResponse(GoogleCaptchaResponse googleResponse, String action) { + if (Objects.isNull(googleResponse)) { + log.error("Google reCaptcha response was empty. ReCaptcha could not be validated."); + throw new InvalidReCaptchaException("reCaptcha was not successfully validated"); + } + + if ("v2".equals(captchaSettings.getCaptchaVersion())) { + if (!googleResponse.isSuccess()) { + log.error("Google reCaptcha v2 returned an unsuccessful response. ReCaptcha was not validated."); + throw new InvalidReCaptchaException("reCaptcha was not successfully validated"); + } + } else { + if (!googleResponse.isSuccess() || !googleResponse.getAction().equals(action) + || googleResponse.getScore() < captchaSettings.getThreshold()) { + log.error("Google reCaptcha v3 returned an unsuccessful response with" + + " action {" + googleResponse.getAction() + "} and score {" + googleResponse.getScore() + "}." 
+ + " ReCaptcha was not validated."); + throw new InvalidReCaptchaException("reCaptcha was not successfully validated"); + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/CaptchaSettings.java b/dspace-api/src/main/java/org/dspace/eperson/CaptchaSettings.java new file mode 100644 index 000000000000..e1fe41f9a6fe --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/CaptchaSettings.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +/** + * This model class represent reCaptcha Google credentials + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class CaptchaSettings { + + private String site; + private String secret; + private float threshold; + private String siteVerify; + private String captchaVersion; + + public String getSite() { + return site; + } + + public void setSite(String site) { + this.site = site; + } + + public String getSecret() { + return secret; + } + + public void setSecret(String secret) { + this.secret = secret; + } + + public float getThreshold() { + return threshold; + } + + public void setThreshold(float threshold) { + this.threshold = threshold; + } + + public String getSiteVerify() { + return siteVerify; + } + + public void setSiteVerify(String siteVerify) { + this.siteVerify = siteVerify; + } + + public String getCaptchaVersion() { + return captchaVersion; + } + + public void setCaptchaVersion(String captchaVersion) { + this.captchaVersion = captchaVersion; + } +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java index 3c48a5244afc..da83a1cafd37 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java +++ 
b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java @@ -37,7 +37,6 @@ * Class representing an e-person. * * @author David Stuve - * @version $Revision$ */ @Entity @Cacheable @@ -381,6 +380,13 @@ String getDigestAlgorithm() { return digestAlgorithm; } + /** + * Store the digest algorithm used to hash the password. You should also + * set the {@link setPassword password hash} and the + * {@link setDigestAlgorithm digest algorithm}. + * + * @param digestAlgorithm + */ void setDigestAlgorithm(String digestAlgorithm) { this.digestAlgorithm = digestAlgorithm; } @@ -389,6 +395,13 @@ String getSalt() { return salt; } + /** + * Store the salt used when hashing the password. You should also set the + * {@link setPassword password hash} and the {@link setDigestAlgorithm + * digest algorithm}. + * + * @param salt + */ void setSalt(String salt) { this.salt = salt; } @@ -397,6 +410,12 @@ String getPassword() { return password; } + /** + * Store the hash of a password. You should also set the + * {@link setSalt salt} and the {@link setDigestAlgorithm digest algorithm}. 
+ * + * @param password + */ void setPassword(String password) { this.password = password; } @@ -427,4 +446,8 @@ public Date getPreviousActive() { return previousActive; } + public boolean hasPasswordSet() { + return StringUtils.isNotBlank(getPassword()); + } + } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java index aee2e7a082bd..343ddcccfa39 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonCLITool.java @@ -13,6 +13,7 @@ import java.io.IOException; import java.io.InputStreamReader; import java.sql.SQLException; +import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -28,6 +29,8 @@ import org.dspace.core.Context; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; +import org.dspace.util.ConsoleService; +import org.dspace.util.ConsoleServiceImpl; public class EPersonCLITool { @@ -57,13 +60,22 @@ public class EPersonCLITool { private static final Option OPT_NEW_EMAIL = new Option("i", "newEmail", true, "new email address"); private static final Option OPT_NEW_NETID = new Option("I", "newNetid", true, "new network ID"); + private static final Option OPT_NEW_PASSWORD + = new Option("w", "newPassword", false, "prompt for new password"); - private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + static final String ERR_PASSWORD_EMPTY = "The new password may not be empty."; + static final String ERR_PASSWORD_NOMATCH = "Passwords do not match. 
Password not set"; + + private static final EPersonService ePersonService + = EPersonServiceFactory.getInstance().getEPersonService(); + + private static ConsoleService consoleService + = new ConsoleServiceImpl(); /** * Default constructor */ - private EPersonCLITool() { } + EPersonCLITool() { } /** * Tool for manipulating user accounts. @@ -110,7 +122,6 @@ public static void main(String argv[]) new HelpFormatter().printHelp("user [options]", globalOptions); context.abort(); status = 1; - throw new IllegalArgumentException(); } if (context.isValid()) { @@ -120,6 +131,8 @@ public static void main(String argv[]) System.err.println(ex.getMessage()); } } + + System.exit(status); } /** @@ -177,11 +190,11 @@ private static int cmdAdd(Context context, String[] argv) throws AuthorizeExcept EPerson eperson = null; try { eperson = ePersonService.create(context); - } catch (SQLException ex) { + } catch (SQLException | AuthorizeException ex) { context.abort(); System.err.println(ex.getMessage()); return 1; - } catch (AuthorizeException ex) { /* XXX SNH */ } + } eperson.setCanLogIn(true); eperson.setSelfRegistered(false); @@ -204,11 +217,11 @@ private static int cmdAdd(Context context, String[] argv) throws AuthorizeExcept try { ePersonService.update(context, eperson); System.out.printf("Created EPerson %s\n", eperson.getID().toString()); - } catch (SQLException ex) { + } catch (SQLException | AuthorizeException ex) { context.abort(); System.err.println(ex.getMessage()); return 1; - } catch (AuthorizeException ex) { /* XXX SNH */ } + } return 0; } @@ -315,6 +328,7 @@ private static int cmdModify(Context context, String[] argv) throws AuthorizeExc options.addOption(OPT_CAN_LOGIN); options.addOption(OPT_NEW_EMAIL); options.addOption(OPT_NEW_NETID); + options.addOption(OPT_NEW_PASSWORD); options.addOption("h", "help", false, "explain --modify options"); @@ -334,11 +348,14 @@ private static int cmdModify(Context context, String[] argv) throws AuthorizeExc // Modify! 
EPerson eperson = null; + String userName = null; try { if (command.hasOption(OPT_NETID.getOpt())) { - eperson = ePersonService.findByNetid(context, command.getOptionValue(OPT_NETID.getOpt())); + userName = command.getOptionValue(OPT_NETID.getOpt()); + eperson = ePersonService.findByNetid(context, userName); } else if (command.hasOption(OPT_EMAIL.getOpt())) { - eperson = ePersonService.findByEmail(context, command.getOptionValue(OPT_EMAIL.getOpt())); + userName = command.getOptionValue(OPT_EMAIL.getOpt()); + eperson = ePersonService.findByEmail(context, userName); } else { System.err.println("No EPerson selected"); return 1; @@ -361,6 +378,25 @@ private static int cmdModify(Context context, String[] argv) throws AuthorizeExc eperson.setNetid(command.getOptionValue(OPT_NEW_NETID.getOpt())); modified = true; } + if (command.hasOption(OPT_NEW_PASSWORD.getOpt())) { + char[] password1 = consoleService.readPassword( + "Enter new password for user '%s': ", userName); + char[] password2 = consoleService.readPassword( + "Enter new password again to verify: "); + if (password1.length <= 0 || password2.length <= 0) { + System.err.println(ERR_PASSWORD_EMPTY); + } else if (Arrays.equals(password1, password2)) { + PasswordHash newHashedPassword = new PasswordHash(String.valueOf(password1)); + Arrays.fill(password1, '\0'); // Obliterate cleartext passwords + Arrays.fill(password2, '\0'); + eperson.setPassword(newHashedPassword.getHashString()); + eperson.setSalt(newHashedPassword.getSaltString()); + eperson.setDigestAlgorithm(newHashedPassword.getAlgorithm()); + modified = true; + } else { + System.err.println(ERR_PASSWORD_NOMATCH); + } + } if (command.hasOption(OPT_GIVENNAME.getOpt())) { eperson.setFirstName(context, command.getOptionValue(OPT_GIVENNAME.getOpt())); modified = true; @@ -387,15 +423,16 @@ private static int cmdModify(Context context, String[] argv) throws AuthorizeExc eperson.setCanLogIn(Boolean.valueOf(command.getOptionValue(OPT_CAN_LOGIN.getOpt()))); modified = 
true; } + if (modified) { try { ePersonService.update(context, eperson); System.out.printf("Modified EPerson %s\n", eperson.getID().toString()); - } catch (SQLException ex) { + } catch (SQLException | AuthorizeException ex) { context.abort(); System.err.println(ex.getMessage()); return 1; - } catch (AuthorizeException ex) { /* XXX SNH */ } + } } else { System.out.println("No changes."); } @@ -407,6 +444,7 @@ private static int cmdModify(Context context, String[] argv) throws AuthorizeExc /** * Command to list known EPersons. */ + @SuppressWarnings("unused") private static int cmdList(Context context, String[] argv) { // XXX ideas: // specific user/netid @@ -427,4 +465,13 @@ private static int cmdList(Context context, String[] argv) { return 0; } + + /** + * Replace the ConsoleService for testing. + * + * @param service new ConsoleService to be used henceforth. + */ + void setConsoleService(ConsoleService service) { + consoleService = service; + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java index 5a67ee58899f..feefe65717df 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonConsumer.java @@ -7,16 +7,18 @@ */ package org.dspace.eperson; +import java.io.IOException; import java.util.Date; import java.util.UUID; import javax.mail.MessagingException; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; import org.dspace.event.Consumer; @@ -30,16 +32,17 @@ * Recommended filter: EPerson+Create * * @author Stuart Lewis - * @version $Revision$ */ public 
class EPersonConsumer implements Consumer { /** * log4j logger */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class); + private static final Logger log + = org.apache.logging.log4j.LogManager.getLogger(EPersonConsumer.class); protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -74,6 +77,7 @@ public void consume(Context context, Event event) if (et == Event.CREATE) { // Notify of new user registration String notifyRecipient = configurationService.getProperty("registration.notify"); + EPerson eperson = ePersonService.find(context, id); if (notifyRecipient == null) { notifyRecipient = ""; } @@ -81,7 +85,6 @@ public void consume(Context context, Event event) if (!notifyRecipient.equals("")) { try { - EPerson eperson = ePersonService.find(context, id); Email adminEmail = Email .getEmail(I18nUtil.getEmailFilename(context.getCurrentLocale(), "registration_notify")); adminEmail.addRecipient(notifyRecipient); @@ -96,13 +99,33 @@ public void consume(Context context, Event event) adminEmail.send(); - log.info(LogManager.getHeader(context, "registerion_alert", "user=" + log.info(LogHelper.getHeader(context, "registerion_alert", "user=" + eperson.getEmail())); } catch (MessagingException me) { - log.warn(LogManager.getHeader(context, + log.warn(LogHelper.getHeader(context, "error_emailing_administrator", ""), me); } } + + // If enabled, send a "welcome" message to the new EPerson. 
+ if (configurationService.getBooleanProperty("mail.welcome.enabled", false)) { + String addressee = eperson.getEmail(); + if (StringUtils.isNotBlank(addressee)) { + log.debug("Sending welcome email to {}", addressee); + try { + Email message = Email.getEmail( + I18nUtil.getEmailFilename(context.getCurrentLocale(), "welcome")); + message.addRecipient(addressee); + message.send(); + } catch (IOException | MessagingException ex) { + log.warn("Welcome message not sent to {}: {}", + addressee, ex.getMessage()); + } + } else { + log.warn("Welcome message not sent to EPerson {} because it has no email address.", + eperson.getID().toString()); + } + } } else if (et == Event.DELETE) { // TODO: Implement this if required } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index 00d6a8e5e754..b9ac740685bd 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -7,6 +7,8 @@ */ package org.dspace.eperson; +import static org.dspace.content.Item.ANY; + import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; @@ -30,19 +32,25 @@ import org.dspace.content.DSpaceObjectServiceImpl; import org.dspace.content.Item; import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; +import org.dspace.content.QAEventProcessed; import org.dspace.content.WorkspaceItem; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.core.Utils; import org.dspace.eperson.dao.EPersonDAO; import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import 
org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.qaevent.dao.QAEventsDAO; +import org.dspace.services.ConfigurationService; +import org.dspace.util.UUIDUtils; import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.dao.VersionDAO; @@ -96,6 +104,12 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme protected VersionDAO versionDAO; @Autowired(required = true) protected ClaimedTaskService claimedTaskService; + @Autowired(required = true) + protected ConfigurationService configurationService; + @Autowired + protected OrcidTokenService orcidTokenService; + @Autowired + protected QAEventsDAO qaEventsDao; protected EPersonServiceImpl() { super(); @@ -106,12 +120,41 @@ public EPerson find(Context context, UUID id) throws SQLException { return ePersonDAO.findByID(context, EPerson.class, id); } + /** + * Create a fake EPerson which can receive email. Its address will be the + * value of "mail.admin", or "postmaster" if all else fails. 
+ * @param c + * @return + * @throws SQLException + */ + @Override + public EPerson getSystemEPerson(Context c) + throws SQLException { + String adminEmail = configurationService.getProperty("mail.admin"); + if (null == adminEmail) { + adminEmail = "postmaster"; // Last-ditch attempt to send *somewhere* + } + EPerson systemEPerson = findByEmail(c, adminEmail); + + if (null == systemEPerson) { + systemEPerson = new EPerson(); + systemEPerson.setEmail(adminEmail); + } + + return systemEPerson; + } + @Override public EPerson findByIdOrLegacyId(Context context, String id) throws SQLException { - if (StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUID.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } @@ -150,32 +193,98 @@ public List search(Context context, String query) throws SQLException { @Override public List search(Context context, String query, int offset, int limit) throws SQLException { - try { - List ePerson = new ArrayList<>(); - EPerson person = find(context, UUID.fromString(query)); + List ePersons = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by firstname & lastname (NOTE: email will also be included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + ePersons = ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), + Arrays.asList(firstNameField, lastNameField), offset, limit); + } else { + // Search by UUID + EPerson person = find(context, uuid); if 
(person != null) { - ePerson.add(person); + ePersons.add(person); } - return ePerson; - } catch (IllegalArgumentException e) { + } + return ePersons; + } + + @Override + public int searchResultCount(Context context, String query) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Count results found by firstname & lastname (email is also included automatically) MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); if (StringUtils.isBlank(query)) { query = null; } - return ePersonDAO.search(context, query, Arrays.asList(firstNameField, lastNameField), - Arrays.asList(firstNameField, lastNameField), offset, limit); + result = ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); + } else { + // Search by UUID + EPerson person = find(context, uuid); + if (person != null) { + result = 1; + } } + return result; } @Override - public int searchResultCount(Context context, String query) throws SQLException { - MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); - MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); - if (StringUtils.isBlank(query)) { - query = null; + public List searchNonMembers(Context context, String query, Group excludeGroup, int offset, int limit) + throws SQLException { + List ePersons = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by firstname & lastname (NOTE: email will also be included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { 
+ query = null; + } + ePersons = ePersonDAO.searchNotMember(context, query, Arrays.asList(firstNameField, lastNameField), + excludeGroup, Arrays.asList(firstNameField, lastNameField), + offset, limit); + } else { + // Search by UUID + EPerson person = find(context, uuid); + // Verify EPerson is NOT a member of the given excludeGroup before adding + if (person != null && !groupService.isDirectMember(excludeGroup, person)) { + ePersons.add(person); + } } - return ePersonDAO.searchResultCount(context, query, Arrays.asList(firstNameField, lastNameField)); + + return ePersons; + } + + @Override + public int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Count results found by firstname & lastname (email is also included automatically) + MetadataField firstNameField = metadataFieldService.findByElement(context, "eperson", "firstname", null); + MetadataField lastNameField = metadataFieldService.findByElement(context, "eperson", "lastname", null); + if (StringUtils.isBlank(query)) { + query = null; + } + result = ePersonDAO.searchNotMemberCount(context, query, Arrays.asList(firstNameField, lastNameField), + excludeGroup); + } else { + // Search by UUID + EPerson person = find(context, uuid); + // Verify EPerson is NOT a member of the given excludeGroup before counting + if (person != null && !groupService.isDirectMember(excludeGroup, person)) { + result = 1; + } + } + return result; } @Override @@ -220,7 +329,7 @@ public EPerson create(Context context) throws SQLException, AuthorizeException { // Create a table row EPerson e = ePersonDAO.create(context, new EPerson()); - log.info(LogManager.getHeader(context, "create_eperson", "eperson_id=" + log.info(LogHelper.getHeader(context, "create_eperson", "eperson_id=" + e.getID())); context.addEvent(new Event(Event.CREATE, Constants.EPERSON, e.getID(), @@ -271,10 +380,13 @@ public void 
delete(Context context, EPerson ePerson, boolean cascade) throw new AuthorizeException( "You must be an admin to delete an EPerson"); } + // Get all workflow-related groups that the current EPerson belongs to Set workFlowGroups = getAllWorkFlowGroups(context, ePerson); for (Group group: workFlowGroups) { - List ePeople = groupService.allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Get total number of unique EPerson objs who are a member of this group (or subgroup) + int totalMembers = groupService.countAllMembers(context, group); + // If only one EPerson is a member, then we cannot delete the last member of this group. + if (totalMembers == 1) { throw new EmptyWorkflowGroupException(ePerson.getID(), group.getID()); } } @@ -379,13 +491,20 @@ public void delete(Context context, EPerson ePerson, boolean cascade) group.getMembers().remove(ePerson); } + orcidTokenService.deleteByEPerson(context, ePerson); + // Remove any subscriptions subscribeService.deleteByEPerson(context, ePerson); + List qaEvents = qaEventsDao.findByEPerson(context, ePerson); + for (QAEventProcessed qaEvent : qaEvents) { + qaEventsDao.delete(context, qaEvent); + } + // Remove ourself ePersonDAO.delete(context, ePerson); - log.info(LogManager.getHeader(context, "delete_eperson", + log.info(LogHelper.getHeader(context, "delete_eperson", "eperson_id=" + ePerson.getID())); } @@ -486,7 +605,7 @@ public void update(Context context, EPerson ePerson) throws SQLException, Author ePersonDAO.save(context, ePerson); - log.info(LogManager.getHeader(context, "update_eperson", + log.info(LogHelper.getHeader(context, "update_eperson", "eperson_id=" + ePerson.getID())); if (ePerson.isModified()) { @@ -531,14 +650,29 @@ public List getDeleteConstraints(Context context, EPerson ePerson) throw @Override public List findByGroups(Context c, Set groups) throws SQLException { + return findByGroups(c, groups, -1, -1); + } + + @Override + public List findByGroups(Context c, Set 
groups, int pageSize, int offset) throws SQLException { //Make sure we at least have one group, if not don't even bother searching. if (CollectionUtils.isNotEmpty(groups)) { - return ePersonDAO.findByGroups(c, groups); + return ePersonDAO.findByGroups(c, groups, pageSize, offset); } else { return new ArrayList<>(); } } + @Override + public int countByGroups(Context c, Set groups) throws SQLException { + //Make sure we at least have one group, if not don't even bother counting. + if (CollectionUtils.isNotEmpty(groups)) { + return ePersonDAO.countByGroups(c, groups); + } else { + return 0; + } + } + @Override public List findEPeopleWithSubscription(Context context) throws SQLException { return ePersonDAO.findAllSubscribers(context); @@ -569,4 +703,18 @@ public List findNotActiveSince(Context context, Date date) throws SQLEx public int countTotal(Context context) throws SQLException { return ePersonDAO.countRows(context); } + + @Override + public EPerson findByProfileItem(Context context, Item profile) throws SQLException { + List owners = itemService.getMetadata(profile, "dspace", "object", "owner", ANY); + if (CollectionUtils.isEmpty(owners)) { + return null; + } + return find(context, UUIDUtils.fromString(owners.get(0).getAuthority())); + } + + @Override + public String getName(EPerson dso) { + return dso.getName(); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/FrequencyType.java b/dspace-api/src/main/java/org/dspace/eperson/FrequencyType.java new file mode 100644 index 000000000000..72822fb8716e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/FrequencyType.java @@ -0,0 +1,81 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.Calendar; + +import 
org.apache.commons.codec.binary.StringUtils; + +/** + * This enum holds all the possible frequency types + * that can be used in "subscription-send" script + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public enum FrequencyType { + DAY("D"), + WEEK("W"), + MONTH("M"); + + private String shortName; + + private FrequencyType(String shortName) { + this.shortName = shortName; + } + + public static String findLastFrequency(String frequency) { + String startDate = ""; + String endDate = ""; + Calendar cal = Calendar.getInstance(); + // Full ISO 8601 is e.g. + SimpleDateFormat fullIsoStart = new SimpleDateFormat("yyyy-MM-dd'T'00:00:00'Z'"); + SimpleDateFormat fullIsoEnd = new SimpleDateFormat("yyyy-MM-dd'T'23:59:59'Z'"); + switch (frequency) { + case "D": + cal.add(Calendar.DAY_OF_MONTH, -1); + endDate = fullIsoEnd.format(cal.getTime()); + startDate = fullIsoStart.format(cal.getTime()); + break; + case "M": + int dayOfMonth = cal.get(Calendar.DAY_OF_MONTH); + cal.add(Calendar.DAY_OF_MONTH, -dayOfMonth); + endDate = fullIsoEnd.format(cal.getTime()); + cal.add(Calendar.MONTH, -1); + cal.add(Calendar.DAY_OF_MONTH, 1); + startDate = fullIsoStart.format(cal.getTime()); + break; + case "W": + cal.add(Calendar.DAY_OF_WEEK, -1); + int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK) - 1; + cal.add(Calendar.DAY_OF_WEEK, -dayOfWeek); + endDate = fullIsoEnd.format(cal.getTime()); + cal.add(Calendar.DAY_OF_WEEK, -6); + startDate = fullIsoStart.format(cal.getTime()); + break; + default: + return null; + } + return "[" + startDate + " TO " + endDate + "]"; + } + + public static boolean isSupportedFrequencyType(String value) { + for (FrequencyType ft : Arrays.asList(FrequencyType.values())) { + if (StringUtils.equals(ft.getShortName(), value)) { + return true; + } + } + return false; + } + + public String getShortName() { + return shortName; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/GoogleCaptchaResponse.java 
b/dspace-api/src/main/java/org/dspace/eperson/GoogleCaptchaResponse.java new file mode 100644 index 000000000000..30817f243cd9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/GoogleCaptchaResponse.java @@ -0,0 +1,142 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +/** + * This model class represent the response for validation of reCaptcha token + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonPropertyOrder({ + "success", + "score", + "action", + "challenge_ts", + "hostname", + "error-codes" +}) +public class GoogleCaptchaResponse { + + @JsonProperty("success") + private boolean success; + + @JsonProperty("score") + private float score; + + @JsonProperty("action") + private String action; + + @JsonProperty("challenge_ts") + private String challengeTs; + + @JsonProperty("hostname") + private String hostname; + + @JsonProperty("error-codes") + private ErrorCode[] errorCodes; + + public boolean isSuccess() { + return success; + } + + public float getScore() { + return score; + } + + public void setScore(float score) { + this.score = score; + } + + public String getAction() { + return action; + } + + public void setAction(String action) { + this.action = action; + } + + public void setSuccess(boolean success) { + this.success = success; + } + 
+ public String getChallengeTs() { + return challengeTs; + } + + public void setChallengeTs(String challengeTs) { + this.challengeTs = challengeTs; + } + + public String getHostname() { + return hostname; + } + + public void setHostname(String hostname) { + this.hostname = hostname; + } + + public ErrorCode[] getErrorCodes() { + return errorCodes; + } + + public void setErrorCodes(ErrorCode[] errorCodes) { + this.errorCodes = errorCodes; + } + + @JsonIgnore + public boolean hasClientError() { + ErrorCode[] errors = getErrorCodes(); + if (errors == null) { + return false; + } + for (ErrorCode error : errors) { + switch (error) { + case InvalidResponse: + case MissingResponse: + return true; + default: break; + } + } + return false; + } + + static enum ErrorCode { + + MissingSecret, + InvalidSecret, + MissingResponse, + InvalidResponse; + + private static Map errorsMap = new HashMap<>(4); + + static { + errorsMap.put("missing-input-secret", MissingSecret); + errorsMap.put("invalid-input-secret", InvalidSecret); + errorsMap.put("missing-input-response", MissingResponse); + errorsMap.put("invalid-input-response", InvalidResponse); + } + + @JsonCreator + public static ErrorCode forValue(String value) { + return errorsMap.get(value.toLowerCase()); + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/Groomer.java b/dspace-api/src/main/java/org/dspace/eperson/Groomer.java index 2a828cdc12b4..5485bb1d0ca9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Groomer.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Groomer.java @@ -141,20 +141,10 @@ private static void aging(CommandLine command) throws SQLException { System.out.println(); if (delete) { - List whyNot = ePersonService.getDeleteConstraints(myContext, account); - if (!whyNot.isEmpty()) { - System.out.print("\tCannot be deleted; referenced in"); - for (String table : whyNot) { - System.out.print(' '); - System.out.print(table); - } - System.out.println(); - } 
else { - try { - ePersonService.delete(myContext, account); - } catch (AuthorizeException | IOException ex) { - System.err.println(ex.getMessage()); - } + try { + ePersonService.delete(myContext, account); + } catch (AuthorizeException | IOException ex) { + System.err.println(ex.getMessage()); } } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/Group.java b/dspace-api/src/main/java/org/dspace/eperson/Group.java index 09b5ce189b81..67655e0e0aaf 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Group.java +++ b/dspace-api/src/main/java/org/dspace/eperson/Group.java @@ -23,7 +23,6 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObjectLegacySupport; -import org.dspace.content.WorkspaceItem; import org.dspace.core.Constants; import org.dspace.core.Context; import org.hibernate.annotations.CacheConcurrencyStrategy; @@ -83,9 +82,6 @@ public class Group extends DSpaceObject implements DSpaceObjectLegacySupport { @ManyToMany(fetch = FetchType.LAZY, mappedBy = "groups") private final List parentGroups = new ArrayList<>(); - @ManyToMany(fetch = FetchType.LAZY, mappedBy = "supervisorGroups") - private final List supervisedItems = new ArrayList<>(); - @Transient private boolean groupsChanged; @@ -102,7 +98,11 @@ void addMember(EPerson e) { } /** - * Return EPerson members of a Group + * Return EPerson members of a Group. + *

    + * WARNING: This method may have bad performance for Groups with large numbers of EPerson members. + * Therefore, only use this when you need to access every EPerson member. Instead, consider using + * EPersonService.findByGroups() for a paginated list of EPersons. * * @return list of EPersons */ @@ -147,9 +147,13 @@ List getParentGroups() { } /** - * Return Group members of a Group. + * Return Group members (i.e. direct subgroups) of a Group. + *

    + * WARNING: This method may have bad performance for Groups with large numbers of Subgroups. + * Therefore, only use this when you need to access every Subgroup. Instead, consider using + * GroupService.findByParent() for a paginated list of Subgroups. * - * @return list of groups + * @return list of subgroups */ public List getMemberGroups() { return groups; @@ -201,7 +205,7 @@ public String getName() { void setName(String name) throws SQLException { if (!StringUtils.equals(this.name, name) && !isPermanent()) { this.name = name; - groupsChanged = true; + setMetadataModified(); } } @@ -218,10 +222,6 @@ public Integer getLegacyId() { return legacyId; } - public List getSupervisedItems() { - return supervisedItems; - } - /** * May this Group be renamed or deleted? (The content of any group may be * changed.) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index 35c0cc796360..730053e42ce2 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -15,12 +15,15 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.UUID; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; @@ -34,7 +37,7 @@ import org.dspace.content.service.CommunityService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.dao.Group2GroupCacheDAO; import org.dspace.eperson.dao.GroupDAO; import 
org.dspace.eperson.factory.EPersonServiceFactory; @@ -51,8 +54,6 @@ import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService; import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; /** @@ -63,7 +64,7 @@ * @author kevinvandevelde at atmire.com */ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements GroupService { - private static final Logger log = LoggerFactory.getLogger(GroupServiceImpl.class); + private static final Logger log = LogManager.getLogger(); @Autowired(required = true) protected GroupDAO groupDAO; @@ -110,7 +111,7 @@ public Group create(Context context) throws SQLException, AuthorizeException { // Create a table row Group g = groupDAO.create(context, new Group()); - log.info(LogManager.getHeader(context, "create_group", "group_id=" + log.info(LogHelper.getHeader(context, "create_group", "group_id=" + g.getID())); context.addEvent(new Event(Event.CREATE, Constants.GROUP, g.getID(), null, getIdentifiers(context, g))); @@ -178,8 +179,13 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S for (CollectionRole collectionRole : collectionRoles) { if (StringUtils.equals(collectionRole.getRoleId(), role.getId()) && claimedTask.getWorkflowItem().getCollection() == collectionRole.getCollection()) { - List ePeople = allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group)); + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, group); + // If this group has only one direct EPerson and *zero* child groups, then we cannot delete the + // 
EPerson or we will leave this group empty. + if (totalDirectEPersons == 1 && totalChildGroups == 0) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -190,8 +196,13 @@ public void removeMember(Context context, Group group, EPerson ePerson) throws S } } if (!poolTasks.isEmpty()) { - List ePeople = allMembers(context, group); - if (ePeople.size() == 1 && ePeople.contains(ePerson)) { + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(group)); + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, group); + // If this group has only one direct EPerson and *zero* child groups, then we cannot delete the + // EPerson or we will leave this group empty. + if (totalDirectEPersons == 1 && totalChildGroups == 0) { throw new IllegalStateException( "Refused to remove user " + ePerson .getID() + " from workflow group because the group " + group @@ -211,9 +222,13 @@ public void removeMember(Context context, Group groupParent, Group childGroup) t if (!collectionRoles.isEmpty()) { List poolTasks = poolTaskService.findByGroup(context, groupParent); if (!poolTasks.isEmpty()) { - List parentPeople = allMembers(context, groupParent); - List childPeople = allMembers(context, childGroup); - if (childPeople.containsAll(parentPeople)) { + // Count number of Groups which have this groupParent as a direct parent + int totalChildGroups = countByParent(context, groupParent); + // Count number of EPersons who are *direct* members of this group + int totalDirectEPersons = ePersonService.countByGroups(context, Set.of(groupParent)); + // If this group has only one childGroup and *zero* direct EPersons, then we cannot delete the + // childGroup or we will leave this group empty. 
+ if (totalChildGroups == 1 && totalDirectEPersons == 0) { throw new IllegalStateException( "Refused to remove sub group " + childGroup .getID() + " from workflow group because the group " + groupParent @@ -352,8 +367,6 @@ public Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQ List groupCache = group2GroupCacheDAO.findByChildren(context, groups); // now we have all owning groups, also grab all parents of owning groups - // yes, I know this could have been done as one big query and a union, - // but doing the Oracle port taught me to keep to simple SQL! for (Group2GroupCache group2GroupCache : groupCache) { groups.add(group2GroupCache.getParent()); } @@ -369,7 +382,8 @@ public List allMembers(Context c, Group g) throws SQLException { // Get all groups which are a member of this group List group2GroupCaches = group2GroupCacheDAO.findByParent(c, g); - Set groups = new HashSet<>(); + // Initialize HashSet based on List size to avoid Set resizing. See https://stackoverflow.com/a/21822273 + Set groups = new HashSet<>((int) (group2GroupCaches.size() / 0.75 + 1)); for (Group2GroupCache group2GroupCache : group2GroupCaches) { groups.add(group2GroupCache.getChild()); } @@ -382,6 +396,23 @@ public List allMembers(Context c, Group g) throws SQLException { return new ArrayList<>(childGroupChildren); } + @Override + public int countAllMembers(Context context, Group group) throws SQLException { + // Get all groups which are a member of this group + List group2GroupCaches = group2GroupCacheDAO.findByParent(context, group); + // Initialize HashSet based on List size + current 'group' to avoid Set resizing. 
+ // See https://stackoverflow.com/a/21822273 + Set groups = new HashSet<>((int) ((group2GroupCaches.size() + 1) / 0.75 + 1)); + for (Group2GroupCache group2GroupCache : group2GroupCaches) { + groups.add(group2GroupCache.getChild()); + } + // Append current group as well + groups.add(group); + + // Return total number of unique EPerson objects in any of these groups + return ePersonService.countByGroups(context, groups); + } + @Override public Group find(Context context, UUID id) throws SQLException { if (id == null) { @@ -429,17 +460,17 @@ public List findAll(Context context, List metadataSortFiel } @Override - public List search(Context context, String groupIdentifier) throws SQLException { - return search(context, groupIdentifier, -1, -1); + public List search(Context context, String query) throws SQLException { + return search(context, query, -1, -1); } @Override - public List search(Context context, String groupIdentifier, int offset, int limit) throws SQLException { + public List search(Context context, String query, int offset, int limit) throws SQLException { List groups = new ArrayList<>(); - UUID uuid = UUIDUtils.fromString(groupIdentifier); + UUID uuid = UUIDUtils.fromString(query); if (uuid == null) { //Search by group name - groups = groupDAO.findByNameLike(context, groupIdentifier, offset, limit); + groups = groupDAO.findByNameLike(context, query, offset, limit); } else { //Search by group id Group group = find(context, uuid); @@ -452,12 +483,12 @@ public List search(Context context, String groupIdentifier, int offset, i } @Override - public int searchResultCount(Context context, String groupIdentifier) throws SQLException { + public int searchResultCount(Context context, String query) throws SQLException { int result = 0; - UUID uuid = UUIDUtils.fromString(groupIdentifier); + UUID uuid = UUIDUtils.fromString(query); if (uuid == null) { //Search by group name - result = groupDAO.countByNameLike(context, groupIdentifier); + result = 
groupDAO.countByNameLike(context, query); } else { //Search by group id Group group = find(context, uuid); @@ -469,19 +500,54 @@ public int searchResultCount(Context context, String groupIdentifier) throws SQL return result; } + @Override + public List searchNonMembers(Context context, String query, Group excludeParentGroup, + int offset, int limit) throws SQLException { + List groups = new ArrayList<>(); + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by group name + groups = groupDAO.findByNameLikeAndNotMember(context, query, excludeParentGroup, offset, limit); + } else if (!uuid.equals(excludeParentGroup.getID())) { + // Search by group id + Group group = find(context, uuid); + // Verify it is NOT a member of the given excludeParentGroup before adding + if (group != null && !isMember(excludeParentGroup, group)) { + groups.add(group); + } + } + + return groups; + } + + @Override + public int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException { + int result = 0; + UUID uuid = UUIDUtils.fromString(query); + if (uuid == null) { + // Search by group name + result = groupDAO.countByNameLikeAndNotMember(context, query, excludeParentGroup); + } else if (!uuid.equals(excludeParentGroup.getID())) { + // Search by group id + Group group = find(context, uuid); + // Verify it is NOT a member of the given excludeParentGroup before adding + if (group != null && !isMember(excludeParentGroup, group)) { + result = 1; + } + } + return result; + } + @Override public void delete(Context context, Group group) throws SQLException { if (group.isPermanent()) { - log.error("Attempt to delete permanent Group $", group.getName()); + log.error("Attempt to delete permanent Group {}", group::getName); throw new SQLException("Attempt to delete a permanent Group"); } context.addEvent(new Event(Event.DELETE, Constants.GROUP, group.getID(), group.getName(), getIdentifiers(context, group))); - //Remove the supervised 
group from any workspace items linked to us. - group.getSupervisedItems().clear(); - // Remove any ResourcePolicies that reference this group authorizeService.removeGroupPolicies(context, group); @@ -502,7 +568,7 @@ public void delete(Context context, Group group) throws SQLException { groupDAO.delete(context, group); rethinkGroupCache(context, false); - log.info(LogManager.getHeader(context, "delete_group", "group_id=" + log.info(LogHelper.getHeader(context, "delete_group", "group_id=" + group.getID())); } @@ -595,7 +661,7 @@ public void update(Context context, Group group) throws SQLException, AuthorizeE group.clearGroupsChanged(); } - log.info(LogManager.getHeader(context, "update_group", "group_id=" + log.info(LogHelper.getHeader(context, "update_group", "group_id=" + group.getID())); } @@ -714,7 +780,7 @@ public DSpaceObject getParentObject(Context context, Group group) throws SQLExce // if the group is used for one or more roles on a single collection, // admins can eventually manage it List collectionRoles = collectionRoleService.findByGroup(context, group); - if (collectionRoles != null && collectionRoles.size() > 0) { + if (collectionRoles != null && !collectionRoles.isEmpty()) { Set colls = new HashSet<>(); for (CollectionRole cr : collectionRoles) { colls.add(cr.getCollection()); @@ -735,13 +801,24 @@ public DSpaceObject getParentObject(Context context, Group group) throws SQLExce groups.add(group); List policies = resourcePolicyService.find(context, null, groups, Constants.DEFAULT_ITEM_READ, Constants.COLLECTION); - if (policies.size() > 0) { - return policies.get(0).getdSpaceObject(); + + Optional defaultPolicy = policies.stream().filter(p -> StringUtils.equals( + collectionService.getDefaultReadGroupName((Collection) p.getdSpaceObject(), "ITEM"), + group.getName())).findFirst(); + + if (defaultPolicy.isPresent()) { + return defaultPolicy.get().getdSpaceObject(); } policies = resourcePolicyService.find(context, null, groups, 
Constants.DEFAULT_BITSTREAM_READ, Constants.COLLECTION); - if (policies.size() > 0) { - return policies.get(0).getdSpaceObject(); + + defaultPolicy = policies.stream() + .filter(p -> StringUtils.equals(collectionService.getDefaultReadGroupName( + (Collection) p.getdSpaceObject(), "BITSTREAM"), group.getName())) + .findFirst(); + + if (defaultPolicy.isPresent()) { + return defaultPolicy.get().getdSpaceObject(); } } } @@ -795,10 +872,15 @@ protected Set getChildren(Map> parents, UUID parent) { @Override public Group findByIdOrLegacyId(Context context, String id) throws SQLException { - if (org.apache.commons.lang3.StringUtils.isNumeric(id)) { - return findByLegacyId(context, Integer.parseInt(id)); - } else { - return find(context, UUIDUtils.fromString(id)); + try { + if (StringUtils.isNumeric(id)) { + return findByLegacyId(context, Integer.parseInt(id)); + } else { + return find(context, UUID.fromString(id)); + } + } catch (IllegalArgumentException e) { + // Not a valid legacy ID or valid UUID + return null; } } @@ -817,4 +899,25 @@ public List findByMetadataField(final Context context, final String searc final MetadataField metadataField) throws SQLException { return groupDAO.findByMetadataField(context, searchValue, metadataField); } + + @Override + public String getName(Group dso) { + return dso.getName(); + } + + @Override + public List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException { + if (parent == null) { + return null; + } + return groupDAO.findByParent(context, parent, pageSize, offset); + } + + @Override + public int countByParent(Context context, Group parent) throws SQLException { + if (parent == null) { + return 0; + } + return groupDAO.countByParent(context, parent); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/InvalidReCaptchaException.java b/dspace-api/src/main/java/org/dspace/eperson/InvalidReCaptchaException.java new file mode 100644 index 000000000000..3d6584057f17 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/eperson/InvalidReCaptchaException.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +/** + * This class provides an exception to be used when trying to register a new EPerson + * and Captcha validations failed. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class InvalidReCaptchaException extends RuntimeException { + + private static final long serialVersionUID = -5328794674744121744L; + + public InvalidReCaptchaException(String message) { + super(message); + } + + public InvalidReCaptchaException(String message, Exception cause) { + super(message, cause); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/LoadLastLogin.java b/dspace-api/src/main/java/org/dspace/eperson/LoadLastLogin.java index b6bb99c442ca..390340affdc9 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/LoadLastLogin.java +++ b/dspace-api/src/main/java/org/dspace/eperson/LoadLastLogin.java @@ -11,6 +11,7 @@ import java.io.File; import java.io.FileReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -108,7 +109,7 @@ public static void main(String[] argv) final SimpleDateFormat dateEncoder = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); for (String logName : args) { - BufferedReader logReader = new BufferedReader(new FileReader(logName)); + BufferedReader logReader = new BufferedReader(new FileReader(logName, StandardCharsets.UTF_8)); while (true) { String line = logReader.readLine(); // End of file? 
diff --git a/dspace-api/src/main/java/org/dspace/eperson/SubscribeCLITool.java b/dspace-api/src/main/java/org/dspace/eperson/SubscribeCLITool.java deleted file mode 100644 index 9a5d699965be..000000000000 --- a/dspace-api/src/main/java/org/dspace/eperson/SubscribeCLITool.java +++ /dev/null @@ -1,432 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.eperson; - -import java.io.IOException; -import java.sql.SQLException; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Calendar; -import java.util.Date; -import java.util.List; -import java.util.Locale; -import java.util.ResourceBundle; -import java.util.TimeZone; -import javax.mail.MessagingException; - -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; -import org.dspace.content.Collection; -import org.dspace.content.DCDate; -import org.dspace.content.Item; -import org.dspace.content.MetadataSchemaEnum; -import org.dspace.content.MetadataValue; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.core.Email; -import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.SubscribeService; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; -import org.dspace.search.Harvest; -import org.dspace.search.HarvestedItemInfo; -import 
org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; - -/** - * CLI tool used for sending new item e-mail alerts to users - * - * @author Robert Tansley - * @version $Revision$ - */ -public class SubscribeCLITool { - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SubscribeCLITool.class); - - private static final HandleService handleService - = HandleServiceFactory.getInstance().getHandleService(); - private static final ItemService itemService - = ContentServiceFactory.getInstance().getItemService(); - private static final SubscribeService subscribeService - = EPersonServiceFactory.getInstance().getSubscribeService(); - private static final ConfigurationService configurationService - = DSpaceServicesFactory.getInstance().getConfigurationService(); - - /** - * Default constructor - */ - private SubscribeCLITool() { } - - /** - * Process subscriptions. This must be invoked only once a day. Messages are - * only sent out when a collection has actually received new items, so that - * people's mailboxes are not clogged with many "no new items" mails. - *

    - * Yesterday's newly available items are included. If this is run at for - * example midday, any items that have been made available during the - * current day will not be included, but will be included in the next day's - * run. - *

    - * For example, if today's date is 2002-10-10 (in UTC) items made available - * during 2002-10-09 (UTC) will be included. - * - * @param context The relevant DSpace Context. - * @param test If true, do a "dry run", i.e. don't actually send email, just log the attempt - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. - */ - public static void processDaily(Context context, boolean test) throws SQLException, - IOException { - // Grab the subscriptions - - List subscriptions = subscribeService.findAll(context); - - EPerson currentEPerson = null; - List collections = null; // List of Collections - - // Go through the list collating subscriptions for each e-person - for (Subscription subscription : subscriptions) { - // Does this row relate to the same e-person as the last? - if ((currentEPerson == null) - || (!subscription.getePerson().getID().equals(currentEPerson - .getID()))) { - // New e-person. Send mail for previous e-person - if (currentEPerson != null) { - - try { - sendEmail(context, currentEPerson, collections, test); - } catch (MessagingException me) { - log.error("Failed to send subscription to eperson_id=" - + currentEPerson.getID()); - log.error(me); - } - } - - currentEPerson = subscription.getePerson(); - collections = new ArrayList<>(); - } - - collections.add(subscription.getCollection()); - } - - // Process the last person - if (currentEPerson != null) { - try { - sendEmail(context, currentEPerson, collections, test); - } catch (MessagingException me) { - log.error("Failed to send subscription to eperson_id=" - + currentEPerson.getID()); - log.error(me); - } - } - } - - /** - * Sends an email to the given e-person with details of new items in the - * given collections, items that appeared yesterday. No e-mail is sent if - * there aren't any new items in any of the collections. 
- * - * @param context DSpace context object - * @param eperson eperson to send to - * @param collections List of collection IDs (Integers) - * @param test If true, do a "dry run", i.e. don't actually send email, just log the attempt - * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. - * @throws MessagingException A general class of exceptions for sending email. - * @throws SQLException An exception that provides information on a database access error or other errors. - */ - public static void sendEmail(Context context, EPerson eperson, - List collections, boolean test) throws IOException, MessagingException, - SQLException { - // Get a resource bundle according to the eperson language preferences - Locale supportedLocale = I18nUtil.getEPersonLocale(eperson); - ResourceBundle labels = ResourceBundle.getBundle("Messages", supportedLocale); - - // Get the start and end dates for yesterday - - // The date should reflect the timezone as well. Otherwise we stand to lose that information - // in truncation and roll to an earlier date than intended. - Calendar cal = Calendar.getInstance(TimeZone.getDefault()); - cal.setTime(new Date()); - - // What we actually want to pass to Harvest is "Midnight of yesterday in my current timezone" - // Truncation will actually pass in "Midnight of yesterday in UTC", which will be, - // at least in CDT, "7pm, the day before yesterday, in my current timezone". 
- cal.add(Calendar.HOUR, -24); - cal.set(Calendar.HOUR_OF_DAY, 0); - cal.set(Calendar.MINUTE, 0); - cal.set(Calendar.SECOND, 0); - Date midnightYesterday = cal.getTime(); - - - // FIXME: text of email should be more configurable from an - // i18n viewpoint - StringBuilder emailText = new StringBuilder(); - boolean isFirst = true; - - for (int i = 0; i < collections.size(); i++) { - Collection c = collections.get(i); - - try { - boolean includeAll = configurationService - .getBooleanProperty("harvest.includerestricted.subscription", true); - - // we harvest all the changed item from yesterday until now - List itemInfos = Harvest - .harvest(context, c, new DCDate(midnightYesterday).toString(), null, 0, // Limit - // and - // offset - // zero, - // get - // everything - 0, true, // Need item objects - false, // But not containers - false, // Or withdrawals - includeAll); - - if (configurationService.getBooleanProperty("eperson.subscription.onlynew", false)) { - // get only the items archived yesterday - itemInfos = filterOutModified(itemInfos); - } else { - // strip out the item archived today or - // not archived yesterday and modified today - itemInfos = filterOutToday(itemInfos); - } - - // Only add to buffer if there are new items - if (itemInfos.size() > 0) { - if (!isFirst) { - emailText - .append("\n---------------------------------------\n"); - } else { - isFirst = false; - } - - emailText.append(labels.getString("org.dspace.eperson.Subscribe.new-items")).append(" ").append( - c.getName()).append(": ").append( - itemInfos.size()).append("\n\n"); - - for (int j = 0; j < itemInfos.size(); j++) { - HarvestedItemInfo hii = (HarvestedItemInfo) itemInfos - .get(j); - - String title = hii.item.getName(); - emailText.append(" ").append(labels.getString("org.dspace.eperson.Subscribe.title")) - .append(" "); - - if (StringUtils.isNotBlank(title)) { - emailText.append(title); - } else { - emailText.append(labels.getString("org.dspace.eperson.Subscribe.untitled")); - } - 
- List authors = itemService - .getMetadata(hii.item, MetadataSchemaEnum.DC.getName(), "contributor", Item.ANY, Item.ANY); - - if (authors.size() > 0) { - emailText.append("\n ").append(labels.getString("org.dspace.eperson.Subscribe.authors")) - .append(" ").append( - authors.get(0).getValue()); - - for (int k = 1; k < authors.size(); k++) { - emailText.append("\n ").append( - authors.get(k).getValue()); - } - } - - emailText.append("\n ").append(labels.getString("org.dspace.eperson.Subscribe.id")) - .append(" ").append( - handleService.getCanonicalForm(hii.handle)).append( - "\n\n"); - } - } - } catch (ParseException pe) { - // This should never get thrown as the Dates are auto-generated - } - } - - // Send an e-mail if there were any new items - if (emailText.length() > 0) { - - if (test) { - log.info(LogManager.getHeader(context, "subscription:", "eperson=" + eperson.getEmail())); - log.info(LogManager.getHeader(context, "subscription:", "text=" + emailText.toString())); - - } else { - - Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, "subscription")); - email.addRecipient(eperson.getEmail()); - email.addArgument(emailText.toString()); - email.send(); - - log.info(LogManager.getHeader(context, "sent_subscription", "eperson_id=" + eperson.getID())); - - } - - - } - } - - /** - * Method for invoking subscriptions via the command line - * - * @param argv the command line arguments given - */ - public static void main(String[] argv) { - String usage = "org.dspace.eperson.Subscribe [-t] or nothing to send out subscriptions."; - - Options options = new Options(); - HelpFormatter formatter = new HelpFormatter(); - CommandLine line = null; - - { - Option opt = new Option("t", "test", false, "Run test session"); - opt.setRequired(false); - options.addOption(opt); - } - - { - Option opt = new Option("h", "help", false, "Print this help message"); - opt.setRequired(false); - options.addOption(opt); - } - - try { - line = new 
DefaultParser().parse(options, argv); - } catch (org.apache.commons.cli.ParseException e) { - // automatically generate the help statement - formatter.printHelp(usage, e.getMessage(), options, ""); - System.exit(1); - } - - if (line.hasOption("h")) { - // automatically generate the help statement - formatter.printHelp(usage, options); - System.exit(1); - } - - boolean test = line.hasOption("t"); - - Context context = null; - - try { - context = new Context(Context.Mode.READ_ONLY); - processDaily(context, test); - context.complete(); - } catch (IOException | SQLException e) { - log.fatal(e); - } finally { - if (context != null && context.isValid()) { - // Nothing is actually written - context.abort(); - } - } - } - - private static List filterOutToday(List completeList) { - log.debug("Filtering out all today item to leave new items list size=" - + completeList.size()); - List filteredList = new ArrayList<>(); - - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - String today = sdf.format(new Date()); - // Get the start and end dates for yesterday - Date thisTimeYesterday = new Date(System.currentTimeMillis() - - (24 * 60 * 60 * 1000)); - String yesterday = sdf.format(thisTimeYesterday); - - for (HarvestedItemInfo infoObject : completeList) { - Date lastUpdate = infoObject.item.getLastModified(); - String lastUpdateStr = sdf.format(lastUpdate); - - // has the item modified today? 
- if (lastUpdateStr.equals(today)) { - List dateAccArr = itemService.getMetadata(infoObject.item, "dc", - "date", "accessioned", Item.ANY); - // we need only the item archived yesterday - if (dateAccArr != null && dateAccArr.size() > 0) { - for (MetadataValue date : dateAccArr) { - if (date != null && date.getValue() != null) { - // if it hasn't been archived today - if (date.getValue().startsWith(yesterday)) { - filteredList.add(infoObject); - log.debug("adding : " + dateAccArr.get(0).getValue() - + " : " + today + " : " - + infoObject.handle); - break; - } else { - log.debug("ignoring : " + dateAccArr.get(0).getValue() - + " : " + today + " : " - + infoObject.handle); - } - } - } - } else { - log.debug("no date accessioned, adding : " - + infoObject.handle); - filteredList.add(infoObject); - } - } else { - // the item has been modified yesterday... - filteredList.add(infoObject); - } - } - - return filteredList; - } - - private static List filterOutModified(List completeList) { - log.debug("Filtering out all modified to leave new items list size=" + completeList.size()); - List filteredList = new ArrayList<>(); - - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); - // Get the start and end dates for yesterday - Date thisTimeYesterday = new Date(System.currentTimeMillis() - - (24 * 60 * 60 * 1000)); - String yesterday = sdf.format(thisTimeYesterday); - - for (HarvestedItemInfo infoObject : completeList) { - List dateAccArr = itemService - .getMetadata(infoObject.item, "dc", "date", "accessioned", Item.ANY); - - if (dateAccArr != null && dateAccArr.size() > 0) { - for (MetadataValue date : dateAccArr) { - if (date != null && date.getValue() != null) { - // if it has been archived yesterday - if (date.getValue().startsWith(yesterday)) { - filteredList.add(infoObject); - log.debug("adding : " + dateAccArr.get(0) - .getValue() + " : " + yesterday + " : " + infoObject - .handle); - break; - } else { - log.debug("ignoring : " + dateAccArr.get(0) - .getValue() 
+ " : " + yesterday + " : " + infoObject - .handle); - } - } - } - - - } else { - log.debug("no date accessioned, adding : " + infoObject.handle); - filteredList.add(infoObject); - } - } - - return filteredList; - } -} diff --git a/dspace-api/src/main/java/org/dspace/eperson/SubscribeServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/SubscribeServiceImpl.java index 34dbcdbeaa72..2e4d94f4431e 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/SubscribeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/SubscribeServiceImpl.java @@ -9,15 +9,20 @@ import java.sql.SQLException; import java.util.List; +import java.util.Objects; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.dao.SubscriptionDAO; import org.dspace.eperson.service.SubscribeService; import org.springframework.beans.factory.annotation.Autowired; @@ -29,106 +34,177 @@ * @version $Revision$ */ public class SubscribeServiceImpl implements SubscribeService { - /** - * log4j logger - */ - private Logger log = org.apache.logging.log4j.LogManager.getLogger(SubscribeServiceImpl.class); - @Autowired(required = true) - protected SubscriptionDAO subscriptionDAO; + private Logger log = LogManager.getLogger(SubscribeServiceImpl.class); @Autowired(required = true) - protected AuthorizeService authorizeService; + private SubscriptionDAO subscriptionDAO; @Autowired(required = true) - protected CollectionService collectionService; - - protected SubscribeServiceImpl() { - - } + 
private AuthorizeService authorizeService; + @Autowired(required = true) + private CollectionService collectionService; @Override - public List findAll(Context context) throws SQLException { - return subscriptionDAO.findAllOrderedByEPerson(context); + public List findAll(Context context, String resourceType, Integer limit, Integer offset) + throws Exception { + if (StringUtils.isBlank(resourceType)) { + return subscriptionDAO.findAllOrderedByDSO(context, limit, offset); + } else { + if (resourceType.equals(Collection.class.getSimpleName()) || + resourceType.equals(Community.class.getSimpleName())) { + return subscriptionDAO.findAllOrderedByIDAndResourceType(context, resourceType, limit, offset); + } else { + log.error("Resource type must be Collection or Community"); + throw new Exception("Resource type must be Collection or Community"); + } + } } @Override - public void subscribe(Context context, EPerson eperson, - Collection collection) throws SQLException, AuthorizeException { + public Subscription subscribe(Context context, EPerson eperson, + DSpaceObject dSpaceObject, + List subscriptionParameterList, + String type) throws SQLException, AuthorizeException { // Check authorisation. Must be administrator, or the eperson. 
if (authorizeService.isAdmin(context) - || ((context.getCurrentUser() != null) && (context - .getCurrentUser().getID().equals(eperson.getID())))) { - if (!isSubscribed(context, eperson, collection)) { - Subscription subscription = subscriptionDAO.create(context, new Subscription()); - subscription.setCollection(collection); - subscription.setePerson(eperson); - } + || ((context.getCurrentUser() != null) && (context + .getCurrentUser().getID().equals(eperson.getID())))) { + Subscription newSubscription = subscriptionDAO.create(context, new Subscription()); + subscriptionParameterList.forEach(subscriptionParameter -> + newSubscription.addParameter(subscriptionParameter)); + newSubscription.setEPerson(eperson); + newSubscription.setDSpaceObject(dSpaceObject); + newSubscription.setSubscriptionType(type); + return newSubscription; } else { - throw new AuthorizeException( - "Only admin or e-person themselves can subscribe"); + throw new AuthorizeException("Only admin or e-person themselves can subscribe"); } } @Override - public void unsubscribe(Context context, EPerson eperson, - Collection collection) throws SQLException, AuthorizeException { + public void unsubscribe(Context context, EPerson eperson, DSpaceObject dSpaceObject) + throws SQLException, AuthorizeException { // Check authorisation. Must be administrator, or the eperson. 
if (authorizeService.isAdmin(context) - || ((context.getCurrentUser() != null) && (context - .getCurrentUser().getID().equals(eperson.getID())))) { - if (collection == null) { + || ((context.getCurrentUser() != null) && (context + .getCurrentUser().getID().equals(eperson.getID())))) { + if (dSpaceObject == null) { // Unsubscribe from all subscriptionDAO.deleteByEPerson(context, eperson); } else { - subscriptionDAO.deleteByCollectionAndEPerson(context, collection, eperson); + subscriptionDAO.deleteByDSOAndEPerson(context, dSpaceObject, eperson); - log.info(LogManager.getHeader(context, "unsubscribe", + log.info(LogHelper.getHeader(context, "unsubscribe", "eperson_id=" + eperson.getID() + ",collection_id=" - + collection.getID())); + + dSpaceObject.getID())); } } else { - throw new AuthorizeException( - "Only admin or e-person themselves can unsubscribe"); + throw new AuthorizeException("Only admin or e-person themselves can unsubscribe"); } } @Override - public List getSubscriptions(Context context, EPerson eperson) - throws SQLException { - return subscriptionDAO.findByEPerson(context, eperson); + public List findSubscriptionsByEPerson(Context context, EPerson eperson, Integer limit,Integer offset) + throws SQLException { + return subscriptionDAO.findByEPerson(context, eperson, limit, offset); } @Override - public List getAvailableSubscriptions(Context context) - throws SQLException { - return getAvailableSubscriptions(context, null); + public List findSubscriptionsByEPersonAndDso(Context context, EPerson eperson, + DSpaceObject dSpaceObject, + Integer limit, Integer offset) throws SQLException { + return subscriptionDAO.findByEPersonAndDso(context, eperson, dSpaceObject, limit, offset); } @Override - public List getAvailableSubscriptions(Context context, EPerson eperson) - throws SQLException { - List collections; - if (eperson != null) { + public List findAvailableSubscriptions(Context context) throws SQLException { + return findAvailableSubscriptions(context, 
null); + } + + @Override + public List findAvailableSubscriptions(Context context, EPerson eperson) throws SQLException { + if (Objects.nonNull(eperson)) { context.setCurrentUser(eperson); } - collections = collectionService.findAuthorized(context, null, Constants.ADD); - - return collections; + return collectionService.findAuthorized(context, null, Constants.ADD); } @Override - public boolean isSubscribed(Context context, EPerson eperson, - Collection collection) throws SQLException { - return subscriptionDAO.findByCollectionAndEPerson(context, eperson, collection) != null; + public boolean isSubscribed(Context context, EPerson eperson, DSpaceObject dSpaceObject) throws SQLException { + return subscriptionDAO.findByEPersonAndDso(context, eperson, dSpaceObject, -1, -1) != null; } @Override - public void deleteByCollection(Context context, Collection collection) throws SQLException { - subscriptionDAO.deleteByCollection(context, collection); + public void deleteByDspaceObject(Context context, DSpaceObject dSpaceObject) throws SQLException { + subscriptionDAO.deleteByDspaceObject(context, dSpaceObject); } @Override public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException { subscriptionDAO.deleteByEPerson(context, ePerson); } + + @Override + public Subscription findById(Context context, int id) throws SQLException { + return subscriptionDAO.findByID(context, Subscription.class, id); + } + + @Override + public Subscription updateSubscription(Context context, Integer id, String subscriptionType, + List subscriptionParameterList) + throws SQLException { + Subscription subscriptionDB = subscriptionDAO.findByID(context, Subscription.class, id); + subscriptionDB.removeParameterList(); + subscriptionDB.setSubscriptionType(subscriptionType); + subscriptionParameterList.forEach(x -> subscriptionDB.addParameter(x)); + subscriptionDAO.save(context, subscriptionDB); + return subscriptionDB; + } + + @Override + public Subscription 
addSubscriptionParameter(Context context, Integer id, SubscriptionParameter subscriptionParam) + throws SQLException { + Subscription subscriptionDB = subscriptionDAO.findByID(context, Subscription.class, id); + subscriptionDB.addParameter(subscriptionParam); + subscriptionDAO.save(context, subscriptionDB); + return subscriptionDB; + } + + @Override + public Subscription removeSubscriptionParameter(Context context,Integer id, SubscriptionParameter subscriptionParam) + throws SQLException { + Subscription subscriptionDB = subscriptionDAO.findByID(context, Subscription.class, id); + subscriptionDB.removeParameter(subscriptionParam); + subscriptionDAO.save(context, subscriptionDB); + return subscriptionDB; + } + + @Override + public void deleteSubscription(Context context, Subscription subscription) throws SQLException { + subscriptionDAO.delete(context, subscription); + } + + @Override + public List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, + String subscriptionType, String frequencyValue) throws SQLException { + return subscriptionDAO.findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, + frequencyValue); + } + + @Override + public Long countAll(Context context) throws SQLException { + return subscriptionDAO.countAll(context); + } + + @Override + public Long countSubscriptionsByEPerson(Context context, EPerson ePerson) throws SQLException { + return subscriptionDAO.countAllByEPerson(context, ePerson); + } + + @Override + public Long countByEPersonAndDSO(Context context, EPerson ePerson, DSpaceObject dSpaceObject) + throws SQLException { + return subscriptionDAO.countAllByEPersonAndDso(context, ePerson, dSpaceObject); + } + } diff --git a/dspace-api/src/main/java/org/dspace/eperson/Subscription.java b/dspace-api/src/main/java/org/dspace/eperson/Subscription.java index abe8ad481c8e..5db63740f477 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/Subscription.java +++ 
b/dspace-api/src/main/java/org/dspace/eperson/Subscription.java @@ -7,6 +7,9 @@ */ package org.dspace.eperson; +import java.util.ArrayList; +import java.util.List; +import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; @@ -15,10 +18,11 @@ import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; +import javax.persistence.OneToMany; import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; @@ -37,39 +41,78 @@ public class Subscription implements ReloadableEntity { @SequenceGenerator(name = "subscription_seq", sequenceName = "subscription_seq", allocationSize = 1) private Integer id; - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "collection_id") - private Collection collection; + @ManyToOne(fetch = FetchType.EAGER) + @JoinColumn(name = "dspace_object_id") + private DSpaceObject dSpaceObject; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "eperson_id") private EPerson ePerson; /** - * Protected constructor, create object using: - * {@link org.dspace.eperson.service.SubscribeService#subscribe(Context, EPerson, Collection)} + * Represent subscription type, for example, "content" or "statistics". 
+ * + * NOTE: Currently, in DSpace we use only one "content" */ - protected Subscription() { + @Column(name = "type") + private String subscriptionType; - } + @OneToMany(fetch = FetchType.LAZY, mappedBy = "subscription", cascade = CascadeType.ALL, orphanRemoval = true) + private List subscriptionParameterList = new ArrayList<>(); + /** + * Protected constructor, create object using: + * {@link org.dspace.eperson.service.SubscribeService#subscribe(Context, EPerson, DSpaceObject, List, String)} + */ + protected Subscription() {} + + @Override public Integer getID() { return id; } - public Collection getCollection() { - return collection; + public DSpaceObject getDSpaceObject() { + return this.dSpaceObject; } - void setCollection(Collection collection) { - this.collection = collection; + void setDSpaceObject(DSpaceObject dSpaceObject) { + this.dSpaceObject = dSpaceObject; } - public EPerson getePerson() { + public EPerson getEPerson() { return ePerson; } - void setePerson(EPerson ePerson) { + public void setEPerson(EPerson ePerson) { this.ePerson = ePerson; } -} + + public String getSubscriptionType() { + return subscriptionType; + } + + public void setSubscriptionType(String subscriptionType) { + this.subscriptionType = subscriptionType; + } + + public List getSubscriptionParameterList() { + return subscriptionParameterList; + } + + public void setSubscriptionParameterList(List subscriptionList) { + this.subscriptionParameterList = subscriptionList; + } + + public void addParameter(SubscriptionParameter subscriptionParameter) { + subscriptionParameterList.add(subscriptionParameter); + subscriptionParameter.setSubscription(this); + } + + public void removeParameterList() { + subscriptionParameterList.clear(); + } + + public void removeParameter(SubscriptionParameter subscriptionParameter) { + subscriptionParameterList.remove(subscriptionParameter); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/SubscriptionParameter.java 
b/dspace-api/src/main/java/org/dspace/eperson/SubscriptionParameter.java new file mode 100644 index 000000000000..7526535d7fcd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/SubscriptionParameter.java @@ -0,0 +1,98 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +import org.dspace.core.ReloadableEntity; + +/** + * Database entity representation of the subscription_parameter table + * SubscriptionParameter represents a frequency with which an user wants to be notified. + * + * @author Alba Aliu at atis.al + */ +@Entity +@Table(name = "subscription_parameter") +public class SubscriptionParameter implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "subscription_parameter_seq") + @SequenceGenerator(name = "subscription_parameter_seq", sequenceName = "subscription_parameter_seq", + allocationSize = 1) + @Column(name = "subscription_parameter_id", unique = true) + private Integer id; + + @ManyToOne + @JoinColumn(name = "subscription_id", nullable = false) + private Subscription subscription; + + /* + * Currently, we have only one use case for this attribute: "frequency" + */ + @Column + private String name; + + /* + * Currently, we use this attribute only with following values: "D", "W", "M". 
+ * Where D stand for Day, W stand for Week and M stand for Month + */ + @Column + private String value; + + public SubscriptionParameter() {} + + public SubscriptionParameter(Integer id, Subscription subscription, String name, String value) { + this.id = id; + this.subscription = subscription; + this.name = name; + this.value = value; + } + + public Subscription getSubscription() { + return subscription; + } + + public void setSubscription(Subscription subscription) { + this.subscription = subscription; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + @Override + public Integer getID() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/SupervisorServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/SupervisorServiceImpl.java deleted file mode 100644 index 64180a5e2231..000000000000 --- a/dspace-api/src/main/java/org/dspace/eperson/SupervisorServiceImpl.java +++ /dev/null @@ -1,93 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.eperson; - -import java.sql.SQLException; - -import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.ResourcePolicy; -import org.dspace.authorize.service.ResourcePolicyService; -import org.dspace.content.Item; -import org.dspace.content.WorkspaceItem; -import org.dspace.content.service.ItemService; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.eperson.service.SupervisorService; -import org.springframework.beans.factory.annotation.Autowired; - -public class SupervisorServiceImpl implements 
SupervisorService { - - @Autowired(required = true) - protected ItemService itemService; - @Autowired(required = true) - protected ResourcePolicyService resourcePolicyService; - - protected SupervisorServiceImpl() { - } - - @Override - public boolean isOrder(Context context, WorkspaceItem workspaceItem, Group group) - throws SQLException { - return workspaceItem.getSupervisorGroups().contains(group); - } - - @Override - public void remove(Context context, WorkspaceItem workspaceItem, Group group) - throws SQLException, AuthorizeException { - // get the workspace item and the group from the request values - workspaceItem.getSupervisorGroups().remove(group); - - // get the item and have it remove the policies for the group - Item item = workspaceItem.getItem(); - itemService.removeGroupPolicies(context, item, group); - } - - @Override - public void add(Context context, Group group, WorkspaceItem workspaceItem, int policy) - throws SQLException, AuthorizeException { - // make a table row in the database table, and update with the relevant - // details - workspaceItem.getSupervisorGroups().add(group); - group.getSupervisedItems().add(workspaceItem); - - // If a default policy type has been requested, apply the policies using - // the DSpace API for doing so - if (policy != POLICY_NONE) { - Item item = workspaceItem.getItem(); - - // "Editor" implies READ, WRITE, ADD permissions - // "Observer" implies READ permissions - if (policy == POLICY_EDITOR) { - ResourcePolicy r = resourcePolicyService.create(context); - r.setdSpaceObject(item); - r.setGroup(group); - r.setAction(Constants.READ); - resourcePolicyService.update(context, r); - - r = resourcePolicyService.create(context); - r.setdSpaceObject(item); - r.setGroup(group); - r.setAction(Constants.WRITE); - resourcePolicyService.update(context, r); - - r = resourcePolicyService.create(context); - r.setdSpaceObject(item); - r.setGroup(group); - r.setAction(Constants.ADD); - resourcePolicyService.update(context, r); - - } 
else if (policy == POLICY_OBSERVER) { - ResourcePolicy r = resourcePolicyService.create(context); - r.setdSpaceObject(item); - r.setGroup(group); - r.setAction(Constants.READ); - resourcePolicyService.update(context, r); - } - } - } -} diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java index 51ab89ef7e8f..f7543570dffb 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/EPersonDAO.java @@ -33,12 +33,91 @@ public interface EPersonDAO extends DSpaceObjectDAO, DSpaceObjectLegacy public EPerson findByNetid(Context context, String netid) throws SQLException; + /** + * Search all EPersons by the given MetadataField objects, sorting by the given sort fields. + *

    + * NOTE: As long as a query is specified, the EPerson's email address is included in the search alongside any given + * metadata fields. + * + * @param context DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param sortFields the metadata field(s) to sort the results by + * @param offset the position of the first result to return + * @param limit how many results return + * @return List of matching EPerson objects + * @throws SQLException if an error occurs + */ public List search(Context context, String query, List queryFields, List sortFields, int offset, int limit) throws SQLException; + /** + * Count number of EPersons who match a search on the given metadata fields. This returns the count of total + * results for the same query using the 'search()', and therefore can be used to provide pagination. + * + * @param context DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @return total number of EPersons who match the query + * @throws SQLException if an error occurs + */ public int searchResultCount(Context context, String query, List queryFields) throws SQLException; - public List findByGroups(Context context, Set groups) throws SQLException; + /** + * Search all EPersons via their firstname, lastname, email (fuzzy match), limited to those EPersons which are NOT + * a member of the given group. This may be used to search across EPersons which are valid to add as members to the + * given group. + * + * @param context The DSpace context + * @param query the text to search EPersons for + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. 
+ * @param offset the position of the first result to return + * @param limit how many results return + * @return EPersons matching the query (which are not members of the given group) + * @throws SQLException if database error + */ + List searchNotMember(Context context, String query, List queryFields, Group excludeGroup, + List sortFields, int offset, int limit) throws SQLException; + + /** + * Count number of EPersons that match a given search (fuzzy match) across firstname, lastname and email. This + * search is limited to those EPersons which are NOT a member of the given group. This may be used + * (with searchNotMember()) to perform a paginated search across EPersons which are valid to add to the given group. + * + * @param context The DSpace context + * @param query querystring to fuzzy match against. + * @param queryFields the metadata fields to search within (email is also included automatically) + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + int searchNotMemberCount(Context context, String query, List queryFields, Group excludeGroup) + throws SQLException; + + /** + * Find all EPersons who are a member of one or more of the listed groups in a paginated fashion. This returns + * EPersons ordered by UUID. + * + * @param context current Context + * @param groups Set of group(s) to check membership in + * @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination + * @param offset number of page to load (starting with 1). Set to <=0 to disable pagination + * @return List of all EPersons who are a member of one or more groups. 
+ * @throws SQLException + */ + List findByGroups(Context context, Set groups, int pageSize, int offset) throws SQLException; + + /** + * Count total number of EPersons who are a member of one or more of the listed groups. This provides the total + * number of results to expect from corresponding findByGroups() for pagination purposes. + * + * @param context current Context + * @param groups Set of group(s) to check membership in + * @return total number of (unique) EPersons who are a member of one or more groups. + * @throws SQLException + */ + int countByGroups(Context context, Set groups) throws SQLException; public List findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException; diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java index ab37aa4047d1..9742e1611e5a 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/GroupDAO.java @@ -45,7 +45,7 @@ List findByMetadataField(Context context, String searchValue, MetadataFie * Find all groups ordered by the specified metadata fields ascending * * @param context The DSpace context - * @param sortMetadataFields The metadata fields to sort on + * @param metadataSortFields The metadata fields to sort on * @param pageSize how many results return * @param offset the position of the first result to return * @return A list of all groups, ordered by metadata fields @@ -135,6 +135,38 @@ List findAll(Context context, List metadataSortFields, int */ int countByNameLike(Context context, String groupName) throws SQLException; + /** + * Search all groups via their name (fuzzy match), limited to those groups which are NOT a member of the given + * parent group. This may be used to search across groups which are valid to add to the given parent group. + *

    + * NOTE: The parent group itself is also excluded from the search. + * + * @param context The DSpace context + * @param groupName Group name to fuzzy match against. + * @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned. + * @param offset Offset to use for pagination (-1 to disable) + * @param limit The maximum number of results to return (-1 to disable) + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + List findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent, + int offset, int limit) throws SQLException; + + /** + * Count number of groups that match a given name (fuzzy match), limited to those groups which are NOT a member of + * the given parent group. This may be used (with findByNameLikeAndNotMember()) to search across groups which are + * valid to add to the given parent group. + *

    + * NOTE: The parent group itself is also excluded from the count. + * + * @param context The DSpace context + * @param groupName Group name to fuzzy match against. + * @param excludeParent Parent Group to exclude results from. Groups under this parent will never be returned. + * @return Groups matching the query (which are not members of the given parent) + * @throws SQLException if database error + */ + int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException; + /** * Find a group by its name and the membership of the given EPerson * @@ -146,4 +178,28 @@ List findAll(Context context, List metadataSortFields, int */ Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException; + /** + * Find all groups which are members of a given parent group. + * This provides the same behavior as group.getMemberGroups(), but in a paginated fashion. + * + * @param context The DSpace context + * @param parent Parent Group to search within + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return Groups matching the query + * @throws SQLException if database error + */ + List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException; + + /** + * Returns the number of groups which are members of a given parent group. + * This provides the same behavior as group.getMemberGroups().size(), but with better performance for large groups. + * This method may be used with findByParent() to perform pagination. 
+ * + * @param context The DSpace context + * @param parent Parent Group to search within + * @return Number of Groups matching the query + * @throws SQLException if database error + */ + int countByParent(Context context, Group parent) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionDAO.java b/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionDAO.java index e9f2d5705900..4d762c1775dd 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionDAO.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionDAO.java @@ -10,7 +10,7 @@ import java.sql.SQLException; import java.util.List; -import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; import org.dspace.core.GenericDAO; import org.dspace.eperson.EPerson; @@ -26,17 +26,125 @@ */ public interface SubscriptionDAO extends GenericDAO { - public void deleteByCollection(Context context, Collection collection) throws SQLException; + /** + * Delete all subscription of provided dSpaceObject + * + * @param context DSpace context object + * @param dSpaceObject DSpace resource + * @throws SQLException If database error + */ + public void deleteByDspaceObject(Context context, DSpaceObject dSpaceObject) throws SQLException; - public List findByEPerson(Context context, EPerson eperson) throws SQLException; + /** + * Return a paginated list of all subscriptions of the eperson + * + * @param context DSpace context object + * @param eperson ePerson whose subscriptions want to find + * @param limit Paging limit + * @param offset The position of the first result to return + * @return + * @throws SQLException If database error + */ + public List findByEPerson(Context context, EPerson eperson, Integer limit, Integer offset) + throws SQLException; - public Subscription findByCollectionAndEPerson(Context context, EPerson eperson, Collection collection) - throws SQLException; + /** + * Return a 
paginated list of subscriptions related to a DSpaceObject belong to an ePerson + * + * @param context DSpace context object + * @param eperson ePerson whose subscriptions want to find + * @param dSpaceObject DSpaceObject of whom subscriptions want to find + * @param limit Paging limit + * @param offset The position of the first result to return + * @return + * @throws SQLException If database error + */ + public List findByEPersonAndDso(Context context, EPerson eperson, DSpaceObject dSpaceObject, + Integer limit, Integer offset) throws SQLException; + /** + * Delete all subscription of provided ePerson + * + * @param context DSpace context object + * @param eperson ePerson whose subscriptions want to delete + * @throws SQLException If database error + */ public void deleteByEPerson(Context context, EPerson eperson) throws SQLException; - public void deleteByCollectionAndEPerson(Context context, Collection collection, EPerson eperson) - throws SQLException; + /** + * Delete all subscriptions related to a DSpaceObject belong to an ePerson + * + * @param context DSpace context object + * @param dSpaceObject DSpaceObject of whom subscriptions want to delete + * @param eperson ePerson whose subscriptions want to delete + * @throws SQLException If database error + */ + public void deleteByDSOAndEPerson(Context context, DSpaceObject dSpaceObject, EPerson eperson) throws SQLException; + + /** + * Return a paginated list of all subscriptions ordered by ID and resourceType + * + * @param context DSpace context object + * @param resourceType Could be Collection or Community + * @param limit Paging limit + * @param offset The position of the first result to return + * @return + * @throws SQLException If database error + */ + public List findAllOrderedByIDAndResourceType(Context context, String resourceType, + Integer limit, Integer offset) throws SQLException; + + /** + * Return a paginated list of subscriptions ordered by DSpaceObject + * + * @param context DSpace context 
object + * @param limit Paging limit + * @param offset The position of the first result to return + * @return + * @throws SQLException If database error + */ + public List findAllOrderedByDSO(Context context, Integer limit, Integer offset) throws SQLException; + + /** + * Return a list of all subscriptions by subscriptionType and frequency + * + * @param context DSpace context object + * @param subscriptionType Could be "content" or "statistics". NOTE: in DSpace we have only "content" + * @param frequencyValue Could be "D" stand for Day, "W" stand for Week, and "M" stand for Month + * @return + * @throws SQLException If database error + */ + public List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, + String subscriptionType, String frequencyValue) throws SQLException; + + /** + * Count all subscriptions + * + * @param context DSpace context object + * @return Total of all subscriptions + * @throws SQLException If database error + */ + public Long countAll(Context context) throws SQLException; + + /** + * Count all subscriptions belong to an ePerson + * + * @param context DSpace context object + * @param ePerson ePerson whose subscriptions want count + * @return Total of all subscriptions belong to an ePerson + * @throws SQLException If database error + */ + public Long countAllByEPerson(Context context, EPerson ePerson) throws SQLException; + + /** + * Count all subscriptions related to a DSpaceObject belong to an ePerson + * + * @param context DSpace context object + * @param ePerson ePerson whose subscriptions want count + * @param dSpaceObject DSpaceObject of whom subscriptions want count + * @return + * @throws SQLException If database error + */ + public Long countAllByEPersonAndDso(Context context, EPerson ePerson,DSpaceObject dSpaceObject) throws SQLException; - public List findAllOrderedByEPerson(Context context) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionParameterDAO.java 
b/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionParameterDAO.java new file mode 100644 index 000000000000..ea9c7b0bbd37 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/SubscriptionParameterDAO.java @@ -0,0 +1,22 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.dao; +import org.dspace.core.GenericDAO; +import org.dspace.eperson.SubscriptionParameter; + + +/** + * Database Access Object interface class for the SubscriptionParamter object. + * The implementation of this class is responsible for all database calls for the SubscriptionParameter object and is + * autowired by spring + * This class should only be accessed from a single service and should never be exposed outside of the API + * + * @author Alba Aliu at atis.al + */ +public interface SubscriptionParameterDAO extends GenericDAO { +} diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java index fd4c6f59d9b0..87d6c5869b09 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/EPersonDAOImpl.java @@ -33,7 +33,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the EPerson object. - * This class is responsible for all database calls for the EPerson object and is autowired by spring + * This class is responsible for all database calls for the EPerson object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -50,7 +50,7 @@ public EPerson findByEmail(Context context, String email) throws SQLException { Root ePersonRoot = criteriaQuery.from(EPerson.class); criteriaQuery.select(ePersonRoot); criteriaQuery.where(criteriaBuilder.equal(ePersonRoot.get(EPerson_.email), email.toLowerCase())); - return uniqueResult(context, criteriaQuery, true, EPerson.class, -1, -1); + return uniqueResult(context, criteriaQuery, true, EPerson.class); } @@ -61,7 +61,7 @@ public EPerson findByNetid(Context context, String netid) throws SQLException { Root ePersonRoot = criteriaQuery.from(EPerson.class); criteriaQuery.select(ePersonRoot); criteriaQuery.where((criteriaBuilder.equal(ePersonRoot.get(EPerson_.netid), netid))); - return uniqueResult(context, criteriaQuery, true, EPerson.class, -1, -1); + return uniqueResult(context, criteriaQuery, true, EPerson.class); } @Override @@ -70,17 +70,9 @@ public List search(Context context, String query, List q String queryString = "SELECT " + EPerson.class.getSimpleName() .toLowerCase() + " FROM EPerson as " + EPerson.class .getSimpleName().toLowerCase() + " "; - if (query != null) { - query = "%" + query.toLowerCase() + "%"; - } - Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, sortFields, null); - if (0 <= offset) { - hibernateQuery.setFirstResult(offset); - } - if (0 <= limit) { - hibernateQuery.setMaxResults(limit); - } + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, null, + sortFields, null, limit, offset); return list(hibernateQuery); } @@ -92,6 +84,28 @@ public int searchResultCount(Context context, String query, List return count(hibernateQuery); } + @Override + public List searchNotMember(Context context, String query, List queryFields, + Group excludeGroup, List sortFields, + int offset, int limit) throws SQLException { + String queryString = "SELECT " + EPerson.class.getSimpleName() + .toLowerCase() + " FROM EPerson as " + EPerson.class 
+ .getSimpleName().toLowerCase() + " "; + + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup, + sortFields, null, limit, offset); + return list(hibernateQuery); + } + + public int searchNotMemberCount(Context context, String query, List queryFields, + Group excludeGroup) throws SQLException { + String queryString = "SELECT count(*) FROM EPerson as " + EPerson.class.getSimpleName().toLowerCase(); + + Query hibernateQuery = getSearchQuery(context, queryString, query, queryFields, excludeGroup, + Collections.EMPTY_LIST, null, -1, -1); + return count(hibernateQuery); + } + @Override public List findAll(Context context, MetadataField metadataSortField, String sortField, int pageSize, int offset) throws SQLException { @@ -105,14 +119,15 @@ public List findAll(Context context, MetadataField metadataSortField, S sortFields = Collections.singletonList(metadataSortField); } - Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, sortFields, sortField, pageSize, - offset); + Query query = getSearchQuery(context, queryString, null, ListUtils.EMPTY_LIST, null, + sortFields, sortField, pageSize, offset); return list(query); } @Override - public List findByGroups(Context context, Set groups) throws SQLException { + public List findByGroups(Context context, Set groups, int pageSize, int offset) + throws SQLException { Query query = createQuery(context, "SELECT DISTINCT e FROM EPerson e " + "JOIN e.groups g " + @@ -122,12 +137,35 @@ public List findByGroups(Context context, Set groups) throws SQL for (Group group : groups) { idList.add(group.getID()); } - query.setParameter("idList", idList); + if (pageSize > 0) { + query.setMaxResults(pageSize); + } + if (offset > 0) { + query.setFirstResult(offset); + } + return list(query); } + @Override + public int countByGroups(Context context, Set groups) throws SQLException { + Query query = createQuery(context, + "SELECT count(DISTINCT e) FROM EPerson e " + + "JOIN 
e.groups g " + + "WHERE g.id IN (:idList) "); + + List idList = new ArrayList<>(groups.size()); + for (Group group : groups) { + idList.add(group.getID()); + } + + query.setParameter("idList", idList); + + return count(query); + } + @Override public List findWithPasswordWithoutDigestAlgorithm(Context context) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); @@ -154,43 +192,88 @@ public List findNotActiveSince(Context context, Date date) throws SQLEx protected Query getSearchQuery(Context context, String queryString, String queryParam, List queryFields, List sortFields, String sortField) throws SQLException { - return getSearchQuery(context, queryString, queryParam, queryFields, sortFields, sortField, -1, -1); + return getSearchQuery(context, queryString, queryParam, queryFields, null, sortFields, sortField, -1, -1); } + /** + * Build a search query across EPersons based on the given metadata fields and sorted based on the given metadata + * field(s) or database column. + *

    + * NOTE: the EPerson's email address is included in the search alongside any given metadata fields. + * + * @param context DSpace Context + * @param queryString String which defines the beginning "SELECT" for the SQL query + * @param queryParam Actual text being searched for + * @param queryFields List of metadata fields to search within + * @param excludeGroup Optional Group which should be excluded from search. Any EPersons who are members + * of this group will not be included in the results. + * @param sortFields Optional List of metadata fields to sort by (should not be specified if sortField is used) + * @param sortField Optional database column to sort on (should not be specified if sortFields is used) + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return built Query object + * @throws SQLException if error occurs + */ protected Query getSearchQuery(Context context, String queryString, String queryParam, - List queryFields, List sortFields, String sortField, - int pageSize, int offset) throws SQLException { - + List queryFields, Group excludeGroup, + List sortFields, String sortField, + int pageSize, int offset) throws SQLException { + // Initialize SQL statement using the passed in "queryString" StringBuilder queryBuilder = new StringBuilder(); queryBuilder.append(queryString); + Set metadataFieldsToJoin = new LinkedHashSet<>(); metadataFieldsToJoin.addAll(queryFields); metadataFieldsToJoin.addAll(sortFields); + // Append necessary join information for MetadataFields we will search within if (!CollectionUtils.isEmpty(metadataFieldsToJoin)) { addMetadataLeftJoin(queryBuilder, EPerson.class.getSimpleName().toLowerCase(), metadataFieldsToJoin); } - if (queryParam != null) { + // Always append a search on EPerson "email" based on query + if (StringUtils.isNotBlank(queryParam)) { addMetadataValueWhereQuery(queryBuilder, queryFields, "like", EPerson.class.getSimpleName().toLowerCase() + ".email 
like :queryParam"); } + // If excludeGroup is specified, exclude members of that group from results + // This uses a subquery to find the excluded group & verify that it is not in the EPerson list of "groups" + if (excludeGroup != null) { + // If query params exist, then we already have a WHERE clause (see above) and just need to append an AND + if (StringUtils.isNotBlank(queryParam)) { + queryBuilder.append(" AND "); + } else { + // no WHERE clause yet, so this is the start of the WHERE + queryBuilder.append(" WHERE "); + } + queryBuilder.append("(FROM Group g where g.id = :group_id) NOT IN elements (") + .append(EPerson.class.getSimpleName().toLowerCase()).append(".groups)"); + } + // Add sort/order by info to query, if specified if (!CollectionUtils.isEmpty(sortFields) || StringUtils.isNotBlank(sortField)) { addMetadataSortQuery(queryBuilder, sortFields, Collections.singletonList(sortField)); } + // Create the final SQL SELECT statement (based on included params above) Query query = createQuery(context, queryBuilder.toString()); + // Set pagesize & offset for pagination if (pageSize > 0) { query.setMaxResults(pageSize); } if (offset > 0) { query.setFirstResult(offset); } + // Set all parameters to the SQL SELECT statement (based on included params above) if (StringUtils.isNotBlank(queryParam)) { query.setParameter("queryParam", "%" + queryParam.toLowerCase() + "%"); } for (MetadataField metadataField : metadataFieldsToJoin) { query.setParameter(metadataField.toString(), metadataField.getID()); } + if (excludeGroup != null) { + query.setParameter("group_id", excludeGroup.getID()); + } + + query.setHint("org.hibernate.cacheable", Boolean.TRUE); return query; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java index 717b41e8b974..83fb48aaf03d 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java 
+++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/Group2GroupCacheDAOImpl.java @@ -25,7 +25,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the Group2GroupCache object. - * This class is responsible for all database calls for the Group2GroupCache object and is autowired by spring + * This class is responsible for all database calls for the Group2GroupCache object and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -83,7 +83,7 @@ public Group2GroupCache find(Context context, Group parent, Group child) throws criteriaBuilder.equal(group2GroupCacheRoot.get(Group2GroupCache_.child), child) ) ); - return uniqueResult(context, criteriaQuery, true, Group2GroupCache.class, -1, -1); + return uniqueResult(context, criteriaQuery, true, Group2GroupCache.class); } @Override diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java index edc2ab749bfa..6aea9ecd8d67 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/GroupDAOImpl.java @@ -164,6 +164,41 @@ public int countByNameLike(final Context context, final String groupName) throws return count(query); } + @Override + public List findByNameLikeAndNotMember(Context context, String groupName, Group excludeParent, + int offset, int limit) throws SQLException { + Query query = createQuery(context, + "FROM Group " + + "WHERE lower(name) LIKE lower(:group_name) " + + "AND id != :parent_id " + + "AND (from Group g where g.id = :parent_id) not in elements (parentGroups)"); + query.setParameter("parent_id", excludeParent.getID()); + query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%"); + + if (0 <= offset) { + query.setFirstResult(offset); + } + if (0 <= limit) { + query.setMaxResults(limit); + } + 
query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } + + @Override + public int countByNameLikeAndNotMember(Context context, String groupName, Group excludeParent) throws SQLException { + Query query = createQuery(context, + "SELECT count(*) FROM Group " + + "WHERE lower(name) LIKE lower(:group_name) " + + "AND id != :parent_id " + + "AND (from Group g where g.id = :parent_id) not in elements (parentGroups)"); + query.setParameter("parent_id", excludeParent.getID()); + query.setParameter("group_name", "%" + StringUtils.trimToEmpty(groupName) + "%"); + + return count(query); + } + @Override public void delete(Context context, Group group) throws SQLException { Query query = getHibernateSession(context) @@ -196,4 +231,29 @@ public int countRows(Context context) throws SQLException { return count(createQuery(context, "SELECT count(*) FROM Group")); } + @Override + public List findByParent(Context context, Group parent, int pageSize, int offset) throws SQLException { + Query query = createQuery(context, + "SELECT g FROM Group g JOIN g.parentGroups pg " + + "WHERE pg.id = :parent_id"); + query.setParameter("parent_id", parent.getID()); + if (pageSize > 0) { + query.setMaxResults(pageSize); + } + if (offset > 0) { + query.setFirstResult(offset); + } + query.setHint("org.hibernate.cacheable", Boolean.TRUE); + + return list(query); + } + + @Override + public int countByParent(Context context, Group parent) throws SQLException { + Query query = createQuery(context, "SELECT count(g) FROM Group g JOIN g.parentGroups pg " + + "WHERE pg.id = :parent_id"); + query.setParameter("parent_id", parent.getID()); + + return count(query); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java index 35fda4b62fbf..4a15dcc86796 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/RegistrationDataDAOImpl.java @@ -21,7 +21,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the RegistrationData object. - * This class is responsible for all database calls for the RegistrationData object and is autowired by spring + * This class is responsible for all database calls for the RegistrationData object and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -39,7 +39,7 @@ public RegistrationData findByEmail(Context context, String email) throws SQLExc Root registrationDataRoot = criteriaQuery.from(RegistrationData.class); criteriaQuery.select(registrationDataRoot); criteriaQuery.where(criteriaBuilder.equal(registrationDataRoot.get(RegistrationData_.email), email)); - return uniqueResult(context, criteriaQuery, false, RegistrationData.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, RegistrationData.class); } @Override @@ -49,7 +49,7 @@ public RegistrationData findByToken(Context context, String token) throws SQLExc Root registrationDataRoot = criteriaQuery.from(RegistrationData.class); criteriaQuery.select(registrationDataRoot); criteriaQuery.where(criteriaBuilder.equal(registrationDataRoot.get(RegistrationData_.token), token)); - return uniqueResult(context, criteriaQuery, false, RegistrationData.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, RegistrationData.class); } @Override diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionDAOImpl.java index a90c5da5a11c..6c36211f310c 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionDAOImpl.java @@ -8,18 +8,22 @@ package org.dspace.eperson.dao.impl; import java.sql.SQLException; +import 
java.util.ArrayList; import java.util.LinkedList; import java.util.List; import javax.persistence.Query; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Join; import javax.persistence.criteria.Root; -import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Subscription; +import org.dspace.eperson.SubscriptionParameter; +import org.dspace.eperson.SubscriptionParameter_; import org.dspace.eperson.Subscription_; import org.dspace.eperson.dao.SubscriptionDAO; @@ -31,42 +35,50 @@ * @author kevinvandevelde at atmire.com */ public class SubscriptionDAOImpl extends AbstractHibernateDAO implements SubscriptionDAO { + protected SubscriptionDAOImpl() { super(); } @Override - public List findByEPerson(Context context, EPerson eperson) throws SQLException { + public List findByEPerson(Context context, EPerson eperson, Integer limit, Integer offset) + throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); javax.persistence.criteria.CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Subscription.class); Root subscriptionRoot = criteriaQuery.from(Subscription.class); criteriaQuery.select(subscriptionRoot); criteriaQuery.where(criteriaBuilder.equal(subscriptionRoot.get(Subscription_.ePerson), eperson)); - return list(context, criteriaQuery, false, Subscription.class, -1, -1); - + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.asc(subscriptionRoot.get(Subscription_.dSpaceObject))); + criteriaQuery.orderBy(orderList); + return list(context, criteriaQuery, false, Subscription.class, limit, offset); } @Override - public Subscription findByCollectionAndEPerson(Context context, EPerson eperson, Collection collection) - throws SQLException { + public List findByEPersonAndDso(Context 
context, EPerson eperson, + DSpaceObject dSpaceObject, + Integer limit, Integer offset) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - javax.persistence.criteria.CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Subscription.class); + javax.persistence.criteria.CriteriaQuery criteriaQuery = + getCriteriaQuery(criteriaBuilder, Subscription.class); Root subscriptionRoot = criteriaQuery.from(Subscription.class); criteriaQuery.select(subscriptionRoot); - criteriaQuery - .where(criteriaBuilder.and(criteriaBuilder.equal(subscriptionRoot.get(Subscription_.ePerson), eperson), - criteriaBuilder.equal(subscriptionRoot.get(Subscription_.collection), collection) - ) - ); - return singleResult(context, criteriaQuery); + criteriaQuery.where(criteriaBuilder.and(criteriaBuilder.equal( + subscriptionRoot.get(Subscription_.ePerson), eperson), + criteriaBuilder.equal(subscriptionRoot.get(Subscription_.dSpaceObject), dSpaceObject) + )); + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.asc(subscriptionRoot.get(Subscription_.dSpaceObject))); + criteriaQuery.orderBy(orderList); + return list(context, criteriaQuery, false, Subscription.class, limit, offset); } @Override - public void deleteByCollection(Context context, Collection collection) throws SQLException { - String hqlQuery = "delete from Subscription where collection=:collection"; + public void deleteByDspaceObject(Context context, DSpaceObject dSpaceObject) throws SQLException { + String hqlQuery = "delete from Subscription where dSpaceObject=:dSpaceObject"; Query query = createQuery(context, hqlQuery); - query.setParameter("collection", collection); + query.setParameter("dSpaceObject", dSpaceObject); query.executeUpdate(); } @@ -79,28 +91,98 @@ public void deleteByEPerson(Context context, EPerson eperson) throws SQLExceptio } @Override - public void deleteByCollectionAndEPerson(Context context, Collection collection, EPerson eperson) - throws 
SQLException { - String hqlQuery = "delete from Subscription where collection=:collection AND ePerson=:ePerson"; + public void deleteByDSOAndEPerson(Context context, DSpaceObject dSpaceObject, EPerson eperson) + throws SQLException { + String hqlQuery = "delete from Subscription where dSpaceObject=:dSpaceObject AND ePerson=:ePerson"; Query query = createQuery(context, hqlQuery); - query.setParameter("collection", collection); + query.setParameter("dSpaceObject", dSpaceObject); query.setParameter("ePerson", eperson); query.executeUpdate(); } @Override - public List findAllOrderedByEPerson(Context context) throws SQLException { - + public List findAllOrderedByIDAndResourceType(Context context, String resourceType, + Integer limit, Integer offset) throws SQLException { + String hqlQuery = "select s from Subscription s join %s dso " + + "ON dso.id = s.dSpaceObject ORDER BY subscription_id"; + if (resourceType != null) { + hqlQuery = String.format(hqlQuery, resourceType); + } + Query query = createQuery(context, hqlQuery); + if (limit != -1) { + query.setMaxResults(limit); + } + if (offset != -1) { + query.setFirstResult(offset); + } + query.setHint("org.hibernate.cacheable", false); + return query.getResultList(); + } + @Override + public List findAllOrderedByDSO(Context context, Integer limit, Integer offset) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Subscription.class); Root subscriptionRoot = criteriaQuery.from(Subscription.class); criteriaQuery.select(subscriptionRoot); - List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.asc(subscriptionRoot.get(Subscription_.dSpaceObject))); + criteriaQuery.orderBy(orderList); + return list(context, criteriaQuery, false, Subscription.class, limit, offset); + } + + @Override + public List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, + String subscriptionType, String frequencyValue) 
throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Subscription.class); + Root subscriptionRoot = criteriaQuery.from(Subscription.class); + criteriaQuery.select(subscriptionRoot); + Join childJoin = subscriptionRoot.join("subscriptionParameterList"); + criteriaQuery.where( + criteriaBuilder.and( + criteriaBuilder.equal(subscriptionRoot.get(Subscription_.SUBSCRIPTION_TYPE), subscriptionType), + criteriaBuilder.equal(childJoin.get(SubscriptionParameter_.name), "frequency"), + criteriaBuilder.equal(childJoin.get(SubscriptionParameter_.value), frequencyValue) + )); + List orderList = new ArrayList<>(1); orderList.add(criteriaBuilder.asc(subscriptionRoot.get(Subscription_.ePerson))); + orderList.add(criteriaBuilder.asc(subscriptionRoot.get(Subscription_.id))); criteriaQuery.orderBy(orderList); + return list(context, criteriaQuery, false, Subscription.class, 10000, -1); + } + + @Override + public Long countAll(Context context) throws SQLException { + CriteriaBuilder qb = getCriteriaBuilder(context); + CriteriaQuery cq = qb.createQuery(Long.class); + cq.select(qb.count(cq.from(Subscription.class))); + Query query = this.getHibernateSession(context).createQuery(cq); + return (Long) query.getSingleResult(); + } - return list(context, criteriaQuery, false, Subscription.class, -1, -1); + @Override + public Long countAllByEPerson(Context context, EPerson ePerson) throws SQLException { + CriteriaBuilder qb = getCriteriaBuilder(context); + CriteriaQuery cq = qb.createQuery(Long.class); + Root subscriptionRoot = cq.from(Subscription.class); + cq.select(qb.count(subscriptionRoot)); + cq.where(qb.equal(subscriptionRoot.get(Subscription_.ePerson), ePerson)); + Query query = this.getHibernateSession(context).createQuery(cq); + return (Long) query.getSingleResult(); } + + @Override + public Long countAllByEPersonAndDso(Context context, + EPerson ePerson, DSpaceObject dSpaceObject) 
throws SQLException { + CriteriaBuilder qb = getCriteriaBuilder(context); + CriteriaQuery cq = qb.createQuery(Long.class); + Root subscriptionRoot = cq.from(Subscription.class); + cq.select(qb.count(subscriptionRoot)); + cq.where(qb.and(qb.equal(subscriptionRoot.get(Subscription_.ePerson) + , ePerson), qb.equal(subscriptionRoot.get(Subscription_.dSpaceObject), dSpaceObject))); + Query query = this.getHibernateSession(context).createQuery(cq); + return (Long) query.getSingleResult(); + } + } \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionParameterDAOImpl.java b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionParameterDAOImpl.java new file mode 100644 index 000000000000..37af787ed3a5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/dao/impl/SubscriptionParameterDAOImpl.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.dao.impl; + +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.eperson.SubscriptionParameter; +import org.dspace.eperson.dao.SubscriptionParameterDAO; + +/** + * Hibernate implementation of the Database Access Object interface class for the SubscriptionParameter object. + * This class is responsible for all database calls for the SubscriptionParameter object and is autowired by spring + * This class should never be accessed directly. 
+ * + * @author Alba Aliu at atis.al + */ +public class SubscriptionParameterDAOImpl extends AbstractHibernateDAO + implements SubscriptionParameterDAO { + + protected SubscriptionParameterDAOImpl() { + super(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactory.java b/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactory.java index f7ce13a8a397..b80c37f13ff5 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactory.java +++ b/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactory.java @@ -12,7 +12,6 @@ import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.dspace.eperson.service.SubscribeService; -import org.dspace.eperson.service.SupervisorService; import org.dspace.services.factory.DSpaceServicesFactory; /** @@ -33,8 +32,6 @@ public abstract class EPersonServiceFactory { public abstract SubscribeService getSubscribeService(); - public abstract SupervisorService getSupervisorService(); - public static EPersonServiceFactory getInstance() { return DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName("ePersonServiceFactory", EPersonServiceFactory.class); diff --git a/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactoryImpl.java index 33d9249b6bfd..c4a6cbe9964c 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/factory/EPersonServiceFactoryImpl.java @@ -12,7 +12,6 @@ import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; import org.dspace.eperson.service.SubscribeService; -import org.dspace.eperson.service.SupervisorService; import org.springframework.beans.factory.annotation.Autowired; /** @@ 
-33,8 +32,6 @@ public class EPersonServiceFactoryImpl extends EPersonServiceFactory { private AccountService accountService; @Autowired(required = true) private SubscribeService subscribeService; - @Autowired(required = true) - private SupervisorService supervisorService; @Override public EPersonService getEPersonService() { @@ -61,8 +58,4 @@ public SubscribeService getSubscribeService() { return subscribeService; } - @Override - public SupervisorService getSupervisorService() { - return supervisorService; - } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java index 45fa6d26b1b2..c8ecb0cc67d4 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/AccountService.java @@ -46,11 +46,4 @@ public String getEmail(Context context, String token) public void deleteToken(Context context, String token) throws SQLException; - - /** - * This method verifies that a certain String adheres to the password rules for DSpace - * @param password The String to be checked - * @return A boolean indicating whether or not the given String adheres to the password rules - */ - public boolean verifyPasswordStructure(String password); } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/CaptchaService.java b/dspace-api/src/main/java/org/dspace/eperson/service/CaptchaService.java new file mode 100644 index 000000000000..da417facc628 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/eperson/service/CaptchaService.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson.service; + +import org.dspace.eperson.InvalidReCaptchaException; + +/** + * This service for validate 
the reCaptcha token + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public interface CaptchaService { + + public String REGISTER_ACTION = "register_email"; + + /** + * validate the entered reCaptcha token + * + * @param response reCaptcha token to be validated + * @param action action of reCaptcha + * @throws InvalidReCaptchaException if reCaptcha was not successfully validated + */ + public void processResponse(String response, String action) throws InvalidReCaptchaException; + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java index 6d2dd67d76b7..2afec161a672 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/EPersonService.java @@ -13,8 +13,10 @@ import java.util.Date; import java.util.List; import java.util.Set; +import javax.validation.constraints.NotNull; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; import org.dspace.content.MetadataFieldName; import org.dspace.content.service.DSpaceObjectLegacySupportService; import org.dspace.content.service.DSpaceObjectService; @@ -96,9 +98,9 @@ public List search(Context context, String query) * * @param context The relevant DSpace Context. * @param query The search string - * @param offset Inclusive offset + * @param offset Inclusive offset (the position of the first result to return) * @param limit Maximum number of matches returned - * @return array of EPerson objects + * @return List of matching EPerson objects * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ public List search(Context context, String query, int offset, int limit) @@ -116,6 +118,34 @@ public List search(Context context, String query, int offset, int limit public int searchResultCount(Context context, String query) throws SQLException; + /** + * Find the EPersons that match the search query which are NOT currently members of the given Group. The search + * query is run against firstname, lastname or email. + * + * @param context DSpace context + * @param query The search string + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching EPerson objects + * @throws SQLException if error + */ + List searchNonMembers(Context context, String query, Group excludeGroup, + int offset, int limit) throws SQLException; + + /** + * Returns the total number of EPersons that match the search query which are NOT currently members of the given + * Group. The search query is run against firstname, lastname or email. Can be used with searchNonMembers() to + * support pagination + * + * @param context DSpace context + * @param query The search string + * @param excludeGroup Group to exclude results from. Members of this group will never be returned. + * @return List of matching EPerson objects + * @throws SQLException if error + */ + int searchNonMembersCount(Context context, String query, Group excludeGroup) throws SQLException; + /** * Find all the {@code EPerson}s in a specific order by field. * The sortable fields are: @@ -156,6 +186,19 @@ public List findAll(Context context, int sortField) public List findAll(Context context, int sortField, int pageSize, int offset) throws SQLException; + /** + * The "System EPerson" is a fake account that exists only to receive email. + * It has an email address that should be presumed usable. 
It does not + * exist in the database and is not complete. + * + * @param context current DSpace session. + * @return an EPerson that can presumably receive email. + * @throws SQLException + */ + @NotNull + public EPerson getSystemEPerson(Context context) + throws SQLException; + /** * Create a new eperson * @@ -237,14 +280,42 @@ public EPerson create(Context context) throws SQLException, public List getDeleteConstraints(Context context, EPerson ePerson) throws SQLException; /** - * Retrieve all accounts which belong to at least one of the specified groups. + * Retrieve all EPerson accounts which belong to at least one of the specified groups. + *
<p>
    + * WARNING: This method may have bad performance issues for Groups with a very large number of members, + * as it will load all member EPerson objects into memory. + *
<p>
    + * For better performance, use the paginated version of this method. * * @param c The relevant DSpace Context. * @param groups set of eperson groups * @return a list of epeople * @throws SQLException An exception that provides information on a database access error or other errors. */ - public List findByGroups(Context c, Set groups) throws SQLException; + List findByGroups(Context c, Set groups) throws SQLException; + + /** + * Retrieve all EPerson accounts which belong to at least one of the specified groups, in a paginated fashion. + * + * @param c The relevant DSpace Context. + * @param groups Set of group(s) to check membership in + * @param pageSize number of EPerson objects to load at one time. Set to <=0 to disable pagination + * @param offset number of page to load (starting with 1). Set to <=0 to disable pagination + * @return a list of epeople + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + List findByGroups(Context c, Set groups, int pageSize, int offset) throws SQLException; + + /** + * Count all EPerson accounts which belong to at least one of the specified groups. This provides the total + * number of results to expect from corresponding findByGroups() for pagination purposes. + * + * @param c The relevant DSpace Context. + * @param groups Set of group(s) to check membership in + * @return total number of (unique) EPersons who are a member of one or more groups. + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + int countByGroups(Context c, Set groups) throws SQLException; /** * Retrieve all accounts which are subscribed to receive information about new items. @@ -263,4 +334,16 @@ public EPerson create(Context context) throws SQLException, * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ int countTotal(Context context) throws SQLException; + + /** + * Find the EPerson related to the given profile item. If the given item is not + * a profile item, null is returned. + * + * @param context The relevant DSpace Context. + * @param profile the profile item to search for + * @return the EPerson, if any + * @throws SQLException An exception that provides information on a database + * access error or other errors. + */ + EPerson findByProfileItem(Context context, Item profile) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java index 8979bcc4457a..0be2f47a61eb 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/GroupService.java @@ -189,9 +189,11 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe Set allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException; /** - * Get all of the epeople who are a member of the - * specified group, or a member of a sub-group of the + * Get all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the * specified group, etc. + *
<p>
    + * WARNING: This method may have bad performance for Groups with a very large number of members, as it will load + * all member EPerson objects into memory. Only use if you need access to *every* EPerson object at once. * * @param context The relevant DSpace Context. * @param group Group object @@ -200,6 +202,18 @@ public interface GroupService extends DSpaceObjectService, DSpaceObjectLe */ public List allMembers(Context context, Group group) throws SQLException; + /** + * Count all of the EPerson objects who are a member of the specified group, or a member of a subgroup of the + * specified group, etc. + * In other words, this will return the size of "allMembers()" without having to load all EPerson objects into + * memory. + * @param context current DSpace context + * @param group Group object + * @return count of EPerson object members + * @throws SQLException if error + */ + int countAllMembers(Context context, Group group) throws SQLException; + /** * Find the group by its name - assumes name is unique * @@ -247,37 +261,67 @@ public List findAll(Context context, List metadataSortFiel public List findAll(Context context, int sortField) throws SQLException; /** - * Find the groups that match the search query across eperson_group_id or name + * Find the Groups that match the query across both Group name and Group ID. This is an unpaginated search, + * which means it will load all matching groups into memory at once. This may provide POOR PERFORMANCE when a large + * number of groups are matched. 
* - * @param context DSpace context - * @param groupIdentifier The group name or group ID - * @return array of Group objects + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @return List of matching Group objects * @throws SQLException if error */ - public List search(Context context, String groupIdentifier) throws SQLException; + List search(Context context, String query) throws SQLException; /** - * Find the groups that match the search query across eperson_group_id or name + * Find the Groups that match the query across both Group name and Group ID. This method supports pagination, + * which provides better performance than the above non-paginated search() method. * - * @param context DSpace context - * @param groupIdentifier The group name or group ID - * @param offset Inclusive offset - * @param limit Maximum number of matches returned - * @return array of Group objects + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching Group objects * @throws SQLException if error */ - public List search(Context context, String groupIdentifier, int offset, int limit) throws SQLException; + List search(Context context, String query, int offset, int limit) throws SQLException; /** - * Returns the total number of groups returned by a specific query, without the overhead - * of creating the Group objects to store the results. + * Returns the total number of Groups returned by a specific query. Search is performed based on Group name + * and Group ID. May be used with search() above to support pagination of matching Groups. 
* * @param context DSpace context - * @param query The search string + * @param query The search string used to search across group name or group ID * @return the number of groups matching the query * @throws SQLException if error */ - public int searchResultCount(Context context, String query) throws SQLException; + int searchResultCount(Context context, String query) throws SQLException; + + /** + * Find the groups that match the search query which are NOT currently members (subgroups) + * of the given parentGroup + * + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param excludeParentGroup Parent group to exclude results from + * @param offset Inclusive offset (the position of the first result to return) + * @param limit Maximum number of matches returned + * @return List of matching Group objects + * @throws SQLException if error + */ + List searchNonMembers(Context context, String query, Group excludeParentGroup, + int offset, int limit) throws SQLException; + + /** + * Returns the total number of groups that match the search query which are NOT currently members (subgroups) + * of the given parentGroup. Can be used with searchNonMembers() to support pagination. 
+ * + * @param context DSpace context + * @param query The search string used to search across group name or group ID + * @param excludeParentGroup Parent group to exclude results from + * @return the number of Groups matching the query + * @throws SQLException if error + */ + int searchNonMembersCount(Context context, String query, Group excludeParentGroup) throws SQLException; /** * Return true if group has no direct or indirect members @@ -327,4 +371,29 @@ public List findAll(Context context, List metadataSortFiel */ List findByMetadataField(Context context, String searchValue, MetadataField metadataField) throws SQLException; + + /** + * Find all groups which are a member of the given Parent group + * + * @param context The relevant DSpace Context. + * @param parent The parent Group to search on + * @param pageSize how many results return + * @param offset the position of the first result to return + * @return List of all groups which are members of the parent group + * @throws SQLException database exception if error + */ + List findByParent(Context context, Group parent, int pageSize, int offset) + throws SQLException; + + /** + * Return number of groups which are a member of the given Parent group. + * Can be used with findByParent() for pagination of all groups within a given Parent group. + * + * @param context The relevant DSpace Context. 
+ * @param parent The parent Group to search on + * @return number of groups which are members of the parent group + * @throws SQLException database exception if error + */ + int countByParent(Context context, Group parent) + throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/SubscribeService.java b/dspace-api/src/main/java/org/dspace/eperson/service/SubscribeService.java index 347c69bf5b0e..e70f40e0edf0 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/service/SubscribeService.java +++ b/dspace-api/src/main/java/org/dspace/eperson/service/SubscribeService.java @@ -12,9 +12,11 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Subscription; +import org.dspace.eperson.SubscriptionParameter; /** * Service interface class for the Subscription object. @@ -31,49 +33,74 @@ public interface SubscribeService { * new item appears in the collection. * * @param context DSpace context + * @param limit Number of subscriptions to return + * @param offset Offset number * @return list of Subscription objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public List findAll(Context context) throws SQLException; + public List findAll(Context context, String resourceType, Integer limit, Integer offset) + throws Exception; /** - * Subscribe an e-person to a collection. An e-mail will be sent every day a - * new item appears in the collection. - * - * @param context DSpace context - * @param eperson EPerson to subscribe - * @param collection Collection to subscribe to - * @throws SQLException An exception that provides information on a database access error or other errors. 
- * @throws AuthorizeException Exception indicating the current user of the context does not have permission - * to perform a particular action. + * Subscribe an EPerson to a dSpaceObject (Collection or Community). An e-mail will be sent every day a + * new item appears in the Collection or Community. + * + * @param context DSpace context object + * @param eperson EPerson to subscribe + * @param dSpaceObject DSpaceObject to subscribe + * @param subscriptionParameters list of @SubscriptionParameter + * @param subscriptionType Currently supported only "content" + * @return + * @throws SQLException An exception that provides information on a database access error or other errors. + * @throws AuthorizeException Exception indicating the current user of the context does not have permission + * to perform a particular action. */ - public void subscribe(Context context, EPerson eperson, - Collection collection) throws SQLException, AuthorizeException; + public Subscription subscribe(Context context, EPerson eperson, DSpaceObject dSpaceObject, + List subscriptionParameters, + String subscriptionType) throws SQLException, AuthorizeException; /** * Unsubscribe an e-person to a collection. Passing in null * for the collection unsubscribes the e-person from all collections they * are subscribed to. * - * @param context DSpace context - * @param eperson EPerson to unsubscribe - * @param collection Collection to unsubscribe from + * @param context DSpace context + * @param eperson EPerson to unsubscribe + * @param dSpaceObject DSpaceObject to unsubscribe from * @throws SQLException An exception that provides information on a database access error or other errors. * @throws AuthorizeException Exception indicating the current user of the context does not have permission * to perform a particular action. 
*/ - public void unsubscribe(Context context, EPerson eperson, - Collection collection) throws SQLException, AuthorizeException; + public void unsubscribe(Context context, EPerson eperson, DSpaceObject dSpaceObject) + throws SQLException, AuthorizeException; /** * Find out which collections an e-person is subscribed to * * @param context DSpace context * @param eperson EPerson + * @param limit Number of subscriptions to return + * @param offset Offset number * @return array of collections e-person is subscribed to * @throws SQLException An exception that provides information on a database access error or other errors. */ - public List getSubscriptions(Context context, EPerson eperson) throws SQLException; + public List findSubscriptionsByEPerson(Context context, EPerson eperson, Integer limit,Integer offset) + throws SQLException; + + /** + * Find out which collections an e-person is subscribed to and related with dso + * + * @param context DSpace context + * @param eperson EPerson + * @param dSpaceObject DSpaceObject + * @param limit Number of subscriptions to return + * @param offset Offset number + * @return array of collections e-person is subscribed to and related with dso + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public List findSubscriptionsByEPersonAndDso(Context context, EPerson eperson, + DSpaceObject dSpaceObject, + Integer limit, Integer offset) throws SQLException; /** * Find out which collections the currently logged in e-person can subscribe to @@ -82,8 +109,7 @@ public void unsubscribe(Context context, EPerson eperson, * @return array of collections the currently logged in e-person can subscribe to * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ - public List getAvailableSubscriptions(Context context) - throws SQLException; + public List findAvailableSubscriptions(Context context) throws SQLException; /** * Find out which collections an e-person can subscribe to @@ -93,29 +119,27 @@ public List getAvailableSubscriptions(Context context) * @return array of collections e-person can subscribe to * @throws SQLException An exception that provides information on a database access error or other errors. */ - public List getAvailableSubscriptions(Context context, EPerson eperson) - throws SQLException; + public List findAvailableSubscriptions(Context context, EPerson eperson) throws SQLException; /** * Is that e-person subscribed to that collection? * - * @param context DSpace context - * @param eperson find out if this e-person is subscribed - * @param collection find out if subscribed to this collection + * @param context DSpace context + * @param eperson find out if this e-person is subscribed + * @param dSpaceObject find out if subscribed to this dSpaceObject * @return true if they are subscribed * @throws SQLException An exception that provides information on a database access error or other errors. */ - public boolean isSubscribed(Context context, EPerson eperson, - Collection collection) throws SQLException; + public boolean isSubscribed(Context context, EPerson eperson, DSpaceObject dSpaceObject) throws SQLException; /** * Delete subscription by collection. * - * @param context DSpace context - * @param collection find out if subscribed to this collection + * @param context DSpace context + * @param dSpaceObject find out if subscribed to this dSpaceObject * @throws SQLException An exception that provides information on a database access error or other errors. */ - public void deleteByCollection(Context context, Collection collection) throws SQLException; + public void deleteByDspaceObject(Context context, DSpaceObject dSpaceObject) throws SQLException; /** * Delete subscription by eperson (subscriber). 
@@ -125,4 +149,92 @@ public boolean isSubscribed(Context context, EPerson eperson, * @throws SQLException An exception that provides information on a database access error or other errors. */ public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException; -} + + /** + * Finds a subscription by id + * + * @param context DSpace context + * @param id the id of subscription to be searched + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public Subscription findById(Context context, int id) throws SQLException; + + /** + * Updates a subscription by id + * + * @param context DSpace context + * @param id Integer id + * @param subscriptionParameterList List subscriptionParameterList + * @param subscriptionType type + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public Subscription updateSubscription(Context context, Integer id, String subscriptionType, + List subscriptionParameterList) throws SQLException; + + /** + * Adds a parameter to a subscription + * + * @param context DSpace context + * @param id Integer id + * @param subscriptionParameter SubscriptionParameter subscriptionParameter + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public Subscription addSubscriptionParameter(Context context,Integer id, + SubscriptionParameter subscriptionParameter) throws SQLException; + + /** + * Deletes a parameter from subscription + * + * @param context DSpace context + * @param id Integer id + * @param subscriptionParam SubscriptionParameter subscriptionParameter + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public Subscription removeSubscriptionParameter(Context context, Integer id, + SubscriptionParameter subscriptionParam) throws SQLException; + + /** + * Deletes a subscription + * + * @param context DSpace context + * @param subscription The subscription to delete + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public void deleteSubscription(Context context, Subscription subscription) throws SQLException; + + /** + * Finds all subscriptions by subscriptionType and frequency + * + * @param context DSpace context + * @param subscriptionType Could be "content" or "statistics". NOTE: in DSpace we have only "content" + * @param frequencyValue Could be "D" stand for Day, "W" stand for Week, and "M" stand for Month + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, + String subscriptionType, String frequencyValue) throws SQLException; + + /** + * Counts all subscriptions + * + * @param context DSpace context + */ + public Long countAll(Context context) throws SQLException; + + /** + * Counts all subscriptions by ePerson + * + * @param context DSpace context + * @param ePerson EPerson ePerson + */ + public Long countSubscriptionsByEPerson(Context context, EPerson ePerson) throws SQLException; + + /** + * Counts all subscriptions by ePerson and DSO + * + * @param context DSpace context + * @param ePerson EPerson ePerson + * @param dSpaceObject DSpaceObject dSpaceObject + */ + public Long countByEPersonAndDSO(Context context, EPerson ePerson, DSpaceObject dSpaceObject) throws SQLException; + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/eperson/service/SupervisorService.java b/dspace-api/src/main/java/org/dspace/eperson/service/SupervisorService.java deleted file mode 100644 index 470c9133e59a..000000000000 --- 
a/dspace-api/src/main/java/org/dspace/eperson/service/SupervisorService.java +++ /dev/null @@ -1,83 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.eperson.service; - -import java.sql.SQLException; - -import org.dspace.authorize.AuthorizeException; -import org.dspace.content.WorkspaceItem; -import org.dspace.core.Context; -import org.dspace.eperson.Group; - -/** - * Class to represent the supervisor, primarily for use in applying supervisor - * activities to the database, such as setting and unsetting supervision - * orders and so forth. - * - * @author Richard Jones - * @version $Revision$ - */ -public interface SupervisorService { - - /** - * value to use for no policy set - */ - public static final int POLICY_NONE = 0; - - /** - * value to use for editor policies - */ - public static final int POLICY_EDITOR = 1; - - /** - * value to use for observer policies - */ - public static final int POLICY_OBSERVER = 2; - - /** - * finds out if there is a supervision order that matches this set - * of values - * - * @param context the context this object exists in - * @param workspaceItem the workspace item to be supervised - * @param group the group to be doing the supervising - * @return boolean true if there is an order that matches, false if not - * @throws SQLException An exception that provides information on a database access error or other errors. - */ - public boolean isOrder(Context context, WorkspaceItem workspaceItem, Group group) - throws SQLException; - - /** - * removes the requested group from the requested workspace item in terms - * of supervision. 
This also removes all the policies that group has - * associated with the item - * - * @param context the context this object exists in - * @param workspaceItem the ID of the workspace item - * @param group the ID of the group to be removed from the item - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws AuthorizeException Exception indicating the current user of the context does not have permission - * to perform a particular action. - */ - public void remove(Context context, WorkspaceItem workspaceItem, Group group) - throws SQLException, AuthorizeException; - - /** - * adds a supervision order to the database - * - * @param context the context this object exists in - * @param group the ID of the group which will supervise - * @param workspaceItem the ID of the workspace item to be supervised - * @param policy String containing the policy type to be used - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws AuthorizeException Exception indicating the current user of the context does not have permission - * to perform a particular action. - */ - public void add(Context context, Group group, WorkspaceItem workspaceItem, int policy) - throws SQLException, AuthorizeException; -} diff --git a/dspace-api/src/main/java/org/dspace/event/Consumer.java b/dspace-api/src/main/java/org/dspace/event/Consumer.java index 1a8b16e98a0b..f56efcc7bacb 100644 --- a/dspace-api/src/main/java/org/dspace/event/Consumer.java +++ b/dspace-api/src/main/java/org/dspace/event/Consumer.java @@ -10,18 +10,16 @@ import org.dspace.core.Context; /** - * Interface for content event consumers. Note that the consumer cannot tell if - * it is invoked synchronously or asynchronously; the consumer interface and - * sequence of calls is the same for both. 
Asynchronous consumers may see more - * consume() calls between the start and end of the event stream, if they are - * invoked asynchronously, once in a long time period, rather than synchronously - * after every Context.commit(). - * - * @version $Revision$ + * Interface for content event consumers. Note that the consumer cannot tell + * if it is invoked synchronously or asynchronously; the consumer interface + * and sequence of calls is the same for both. Asynchronous consumers may see + * more consume() calls between the start and end of the event stream, if they + * are invoked asynchronously, once in a long time period, rather than + * synchronously after every Context.commit(). */ public interface Consumer { /** - * Initialize - allocate any resources required to operate. This may include + * Allocate any resources required to operate. This may include * initializing any pooled JMS resources. Called ONCE when created by the * dispatcher pool. This should be used to set up expensive resources that * will remain for the lifetime of the consumer. @@ -31,12 +29,17 @@ public interface Consumer { public void initialize() throws Exception; /** - * Consume an event; events may get filtered at the dispatcher level, hiding - * it from the consumer. This behavior is based on the dispatcher/consumer - * configuration. Should include logic to initialize any resources required - * for a batch of events. + * Consume an event. Events may be filtered by a dispatcher, hiding them + * from the consumer. This behavior is based on the dispatcher/consumer + * configuration. Should include logic to initialize any resources + * required for a batch of events. + * + *
<p>
    This method must not commit the context. Committing causes + * re-dispatch of the event queue, which can result in infinite recursion + * leading to memory exhaustion as seen in + * {@link https://github.com/DSpace/DSpace/pull/8756}. * - * @param ctx the execution context object + * @param ctx the current DSpace session * @param event the content event * @throws Exception if error */ diff --git a/dspace-api/src/main/java/org/dspace/event/ConsumerProfile.java b/dspace-api/src/main/java/org/dspace/event/ConsumerProfile.java index c66c33133238..d1059f8e2fd5 100644 --- a/dspace-api/src/main/java/org/dspace/event/ConsumerProfile.java +++ b/dspace-api/src/main/java/org/dspace/event/ConsumerProfile.java @@ -104,7 +104,9 @@ private void readConfiguration() "No filters configured for consumer named: " + name); } - consumer = (Consumer) Class.forName(className.trim()).getDeclaredConstructor().newInstance(); + consumer = Class.forName(className.trim()) + .asSubclass(Consumer.class) + .getDeclaredConstructor().newInstance(); // Each "filter" is + : ... filters = new ArrayList<>(); diff --git a/dspace-api/src/main/java/org/dspace/event/Event.java b/dspace-api/src/main/java/org/dspace/event/Event.java index 45b6407b2846..af8b2d45713f 100644 --- a/dspace-api/src/main/java/org/dspace/event/Event.java +++ b/dspace-api/src/main/java/org/dspace/event/Event.java @@ -48,8 +48,6 @@ * significance varies by the combination of action and subject type. *

  • - timestamp -- exact millisecond timestamp at which event was logged.
  • * - * - * @version $Revision$ */ public class Event implements Serializable { private static final long serialVersionUID = 1L; @@ -308,6 +306,7 @@ public Event(int eventType, int subjectType, UUID subjectID, int objectType, * @param other the event to compare this one to * @return true if events are "equal", false otherwise. */ + @Override public boolean equals(Object other) { if (other instanceof Event) { Event otherEvent = (Event) other; @@ -315,14 +314,15 @@ public boolean equals(Object other) { .equals(otherEvent.detail)) && this.eventType == otherEvent.eventType && this.subjectType == otherEvent.subjectType - && this.subjectID == otherEvent.subjectID + && this.subjectID.equals(otherEvent.subjectID) && this.objectType == otherEvent.objectType - && this.objectID == otherEvent.objectID; + && this.objectID.equals(otherEvent.objectID); } return false; } + @Override public int hashCode() { return new HashCodeBuilder().append(this.detail) .append(eventType) @@ -634,6 +634,7 @@ public BitSet getBitSet() { * @return Detailed string representation of contents of this event, to * help in logging and debugging. */ + @Override public String toString() { return "org.dspace.event.Event(eventType=" + this.getEventTypeAsString() diff --git a/dspace-api/src/main/java/org/dspace/event/package-info.java b/dspace-api/src/main/java/org/dspace/event/package-info.java new file mode 100644 index 000000000000..544dfb271a1d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/event/package-info.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + * Actions which alter DSpace model objects can queue {@link Event}s, which + * are presented to {@link Consumer}s by a {@link Dispatcher}. 
A pool of + * {@code Dispatcher}s is managed by an {@link service.EventService}, guided + * by configuration properties {@code event.dispatcher.*}. + * + *

    One must be careful not to commit the current DSpace {@code Context} + * during event dispatch. {@code commit()} triggers event dispatching, and + * doing this during event dispatch can lead to infinite recursion and + * memory exhaustion. + */ + +package org.dspace.event; diff --git a/dspace-api/src/main/java/org/dspace/external/OpenaireRestConnector.java b/dspace-api/src/main/java/org/dspace/external/OpenaireRestConnector.java new file mode 100644 index 000000000000..c96fad1de01c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/OpenaireRestConnector.java @@ -0,0 +1,348 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Base64; +import java.util.List; +import javax.xml.bind.JAXBException; + +import eu.openaire.jaxb.helper.OpenAIREHandler; +import eu.openaire.jaxb.model.Response; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.Header; +import org.apache.http.HttpHeaders; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import org.apache.http.NameValuePair; +import org.apache.http.NoHttpResponseException; +import org.apache.http.StatusLine; +import org.apache.http.client.HttpClient; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; +import org.apache.logging.log4j.Logger; +import org.dspace.app.util.Util; +import org.json.JSONObject; +import 
org.springframework.beans.factory.annotation.Autowired; + +/** + * based on OrcidRestConnector it's a rest connector for Openaire API providing + * ways to perform searches and token grabbing + * + * @author paulo-graca + * + */ +public class OpenaireRestConnector { + /** + * log4j logger + */ + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenaireRestConnector.class); + + /** + * Openaire API Url + * and can be configured with: openaire.api.url + */ + private String url = "https://api.openaire.eu"; + + /** + * Boolean with token usage definition true if we want to use a token + * and can be configured with: openaire.token.enabled + */ + boolean tokenEnabled = false; + + /** + * Openaire Authorization and Authentication Token Service URL + * and can be configured with: openaire.token.url + */ + private String tokenServiceUrl; + + /** + * Openaire clientId + * and can be configured with: openaire.token.clientId + */ + private String clientId; + + /** + * OpenaireRest access token + */ + private OpenaireRestToken accessToken; + + /** + * Openaire clientSecret + * and can be configured with: openaire.token.clientSecret + */ + private String clientSecret; + + + public OpenaireRestConnector(String url) { + this.url = url; + } + + + /** + * This method grabs an accessToken an sets the expiration time Based.
    + * Based on https://develop.openaire.eu/basic.html + * + * @throws IOException + */ + public OpenaireRestToken grabNewAccessToken() throws IOException { + + if (StringUtils.isBlank(tokenServiceUrl) || StringUtils.isBlank(clientId) + || StringUtils.isBlank(clientSecret)) { + throw new IOException("Cannot grab OpenAIRE token with nulls service url, client id or secret"); + } + + String auth = clientId + ":" + clientSecret; + String encodedAuth = Base64.getEncoder().encodeToString(auth.getBytes()); + String authHeader = "Basic " + new String(encodedAuth); + + HttpPost httpPost = new HttpPost(tokenServiceUrl); + httpPost.addHeader("Accept", "application/json"); + httpPost.addHeader("User-Agent", "DSpace/" + Util.getSourceVersion()); + httpPost.addHeader("Content-Type", "application/x-www-form-urlencoded"); + httpPost.setHeader(HttpHeaders.AUTHORIZATION, authHeader); + + // Request parameters and other properties. + List params = new ArrayList(1); + params.add(new BasicNameValuePair("grant_type", "client_credentials")); + httpPost.setEntity(new UrlEncodedFormEntity(params, "UTF-8")); + + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpResponse getResponse = httpClient.execute(httpPost); + + JSONObject responseObject = null; + try (InputStream is = getResponse.getEntity().getContent(); + BufferedReader streamReader = new BufferedReader(new InputStreamReader(is, "UTF-8"))) { + String inputStr; + // verify if we have basic json + while ((inputStr = streamReader.readLine()) != null && responseObject == null) { + if (inputStr.startsWith("{") && inputStr.endsWith("}") && inputStr.contains("access_token") + && inputStr.contains("expires_in")) { + try { + responseObject = new JSONObject(inputStr); + } catch (Exception e) { + // Not as valid as I'd hoped, move along + responseObject = null; + } + } + } + } + if (responseObject == null || !responseObject.has("access_token") || !responseObject.has("expires_in")) { + throw new IOException("Unable to grab the 
access token using provided service url, client id and secret"); + } + + return new OpenaireRestToken(responseObject.get("access_token").toString(), + Long.valueOf(responseObject.get("expires_in").toString())); + + } + + /** + * Perform a GET request to the Openaire API + * + * @param file + * @param accessToken + * @return an InputStream with a Result + */ + public InputStream get(String file, String accessToken) { + HttpResponse getResponse = null; + InputStream result = null; + file = trimSlashes(file); + + try { + URL fullPath = new URL(url + '/' + file); + + log.debug("Requesting: " + fullPath.toString()); + + HttpGet httpGet = new HttpGet(fullPath.toURI()); + if (StringUtils.isNotBlank(accessToken)) { + httpGet.addHeader("Authorization", "Bearer " + accessToken); + } + + HttpClient httpClient = HttpClientBuilder.create().build(); + getResponse = httpClient.execute(httpGet); + + StatusLine status = getResponse.getStatusLine(); + + // registering errors + switch (status.getStatusCode()) { + case HttpStatus.SC_NOT_FOUND: + // 404 - Not found + case HttpStatus.SC_FORBIDDEN: + // 403 - Invalid Access Token + case 429: + // 429 - Rate limit abuse for unauthenticated user + Header[] limitUsed = getResponse.getHeaders("x-ratelimit-used"); + Header[] limitMax = getResponse.getHeaders("x-ratelimit-limit"); + + if (limitUsed.length > 0) { + String limitMsg = limitUsed[0].getValue(); + if (limitMax.length > 0) { + limitMsg = limitMsg.concat(" of " + limitMax[0].getValue()); + } + getGotError( + new NoHttpResponseException(status.getReasonPhrase() + " with usage limit " + limitMsg), + url + '/' + file); + } else { + // 429 - Rate limit abuse + getGotError(new NoHttpResponseException(status.getReasonPhrase()), url + '/' + file); + } + break; + default: + // 200 or other + break; + } + + // do not close this httpClient + result = getResponse.getEntity().getContent(); + } catch (MalformedURLException e1) { + getGotError(e1, url + '/' + file); + } catch (Exception e) { + 
getGotError(e, url + '/' + file); + } + + return result; + } + + /** + * Perform an Openaire Project Search By Keywords + * + * @param page + * @param size + * @param keywords + * @return Openaire Response + */ + public Response searchProjectByKeywords(int page, int size, String... keywords) { + String path = "search/projects?keywords=" + String.join("+", keywords); + return search(path, page, size); + } + + /** + * Perform an Openaire Project Search By ID and by Funder + * + * @param projectID + * @param projectFunder + * @param page + * @param size + * @return Openaire Response + */ + public Response searchProjectByIDAndFunder(String projectID, String projectFunder, int page, int size) { + String path = "search/projects?grantID=" + projectID + "&funder=" + projectFunder; + return search(path, page, size); + } + + /** + * Perform an Openaire Search request + * + * @param path + * @param page + * @param size + * @return Openaire Response + */ + public Response search(String path, int page, int size) { + String[] queryStringPagination = { "page=" + page, "size=" + size }; + + String queryString = path + ((path.indexOf("?") > 0) ? 
"&" : "?") + String.join("&", queryStringPagination); + + InputStream result = null; + if (tokenEnabled) { + try { + if (accessToken == null) { + accessToken = this.grabNewAccessToken(); + } else if (!accessToken.isValidToken()) { + accessToken = this.grabNewAccessToken(); + } + + result = get(queryString, accessToken.getToken()); + } catch (IOException e) { + log.error("Error grabbing the token: " + e.getMessage()); + getGotError(e, path); + } + } else { + result = get(queryString, null); + } + + if (result != null) { + try { + return OpenAIREHandler.unmarshal(result); + } catch (JAXBException e) { + log.error("Error extracting result from request: " + queryString); + getGotError(e, path); + } + } + return null; + } + + /** + * trim slashes from the path + * + * @param path + * @return string path without trailing slashes + */ + public static String trimSlashes(String path) { + while (path.endsWith("/")) { + path = path.substring(0, path.length() - 1); + } + while (path.startsWith("/")) { + path = path.substring(1); + } + return path; + } + + /** + * stores clientId to grab the token + * + * @param clientId + */ + @Autowired(required = false) + public void setClientId(String clientId) { + this.clientId = clientId; + } + + /** + * stores tokenServiceUrl to grab the token + * + * @param tokenServiceUrl + */ + @Autowired(required = false) + public void setTokenServiceUrl(String tokenServiceUrl) { + this.tokenServiceUrl = tokenServiceUrl; + } + + /** + * stores clientSecret to grab the token + * + * @param clientSecret + */ + @Autowired(required = false) + public void setClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + } + + /** + * tokenUsage true to enable the usage of an access token + * + * @param tokenEnabled true/false + */ + @Autowired(required = false) + public void setTokenEnabled(boolean tokenEnabled) { + this.tokenEnabled = tokenEnabled; + } + + protected void getGotError(Exception e, String fullPath) { + log.error("Error in rest 
connector for path: " + fullPath, e); + } +} diff --git a/dspace-api/src/main/java/org/dspace/external/OpenaireRestToken.java b/dspace-api/src/main/java/org/dspace/external/OpenaireRestToken.java new file mode 100644 index 000000000000..f5dc2b27f8ab --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/OpenaireRestToken.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external; + +/** + * Openaire rest API token to be used when grabbing an accessToken.
    + * Based on https://develop.openaire.eu/basic.html + * + * @author paulo-graca + * + */ +public class OpenaireRestToken { + + /** + * Stored access token + */ + private String accessToken; + + /** + * Stored expiration period (in seconds) + */ + private Long accessTokenExpiration = 0L; + + /** + * Stores the grabbed token + * + * @param accessToken + * @param expiresIn + */ + public OpenaireRestToken(String accessToken, Long expiresIn) { + this.accessToken = accessToken; + this.setExpirationDate(expiresIn); + } + + /** + * Returns the stored + * + * @return String with the stored token + */ + public String getToken() { + return this.accessToken; + } + + /** + * If the existing token has an expiration date and if it is at a minute of + * expiring + * + * @return + */ + public boolean isValidToken() { + if (this.accessToken == null) { + return false; + } + + return ((accessTokenExpiration - (60 * 1000)) > System.currentTimeMillis()); + } + + private void setExpirationDate(Long expiresIn) { + accessTokenExpiration = System.currentTimeMillis() + (expiresIn * 1000L); + } +} diff --git a/dspace-api/src/main/java/org/dspace/external/OrcidRestConnector.java b/dspace-api/src/main/java/org/dspace/external/OrcidRestConnector.java index 2a75b7cbd2ce..d45be7e6b56e 100644 --- a/dspace-api/src/main/java/org/dspace/external/OrcidRestConnector.java +++ b/dspace-api/src/main/java/org/dspace/external/OrcidRestConnector.java @@ -8,6 +8,7 @@ package org.dspace.external; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.Scanner; import org.apache.commons.lang3.StringUtils; @@ -15,6 +16,7 @@ import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** @@ -28,9 +30,9 @@ public class OrcidRestConnector { /** * log4j logger */ - private static Logger log = 
org.apache.logging.log4j.LogManager.getLogger(OrcidRestConnector.class); + private static final Logger log = LogManager.getLogger(OrcidRestConnector.class); - private String url; + private final String url; public OrcidRestConnector(String url) { this.url = url; @@ -74,7 +76,7 @@ public static String trimSlashes(String path) { } public static String convertStreamToString(InputStream is) { - Scanner s = new Scanner(is).useDelimiter("\\A"); + Scanner s = new Scanner(is, StandardCharsets.UTF_8).useDelimiter("\\A"); return s.hasNext() ? s.next() : ""; } } diff --git a/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java b/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java index cc2587056cdf..eac9921df6cc 100644 --- a/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java +++ b/dspace-api/src/main/java/org/dspace/external/model/ExternalDataObject.java @@ -7,7 +7,7 @@ */ package org.dspace.external.model; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import org.dspace.content.dto.MetadataValueDTO; @@ -32,7 +32,7 @@ public class ExternalDataObject { /** * The list of Metadata values. These our MetadataValueDTO because they won't exist in the DB */ - private List metadata = new LinkedList<>(); + private List metadata = new ArrayList<>(); /** * The display value of the ExternalDataObject */ @@ -87,11 +87,11 @@ public void setMetadata(List metadata) { /** * This method will add a Metadata value to the list of metadata values - * @param metadataValueDTO The metadatavalue to be added + * @param metadataValueDTO The metadata value to be added. 
*/ public void addMetadata(MetadataValueDTO metadataValueDTO) { if (metadata == null) { - metadata = new LinkedList<>(); + metadata = new ArrayList<>(); } metadata.add(metadataValueDTO); } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/AbstractExternalDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/AbstractExternalDataProvider.java new file mode 100644 index 000000000000..e08481e377ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/provider/AbstractExternalDataProvider.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider; +import java.util.List; +import java.util.Objects; + +/** + * This abstract class allows to configure the list of supported entity types + * via spring. If no entity types are explicitly configured it is assumed that + * the provider can be used with any entity type + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public abstract class AbstractExternalDataProvider implements ExternalDataProvider { + + private List supportedEntityTypes; + + public void setSupportedEntityTypes(List supportedEntityTypes) { + this.supportedEntityTypes = supportedEntityTypes; + } + + public List getSupportedEntityTypes() { + return supportedEntityTypes; + } + + /** + * Return true if the supportedEntityTypes list is empty or contains the requested entity type + * + * @param entityType the entity type to check + * @return true if the external provider can be used to search for items of the + * specified type + */ + @Override + public boolean supportsEntityType(String entityType) { + return Objects.isNull(supportedEntityTypes) || supportedEntityTypes.contains(entityType); + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/external/provider/ExternalDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/ExternalDataProvider.java index 5c921efd351d..1227b4b2fffc 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/ExternalDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/ExternalDataProvider.java @@ -57,4 +57,16 @@ public interface ExternalDataProvider { */ public int getNumberOfResults(String query); + /** + * Override this method to limit the external data provider to specific entity + * types (Publication, OrgUnit, etc.) + * + * @param entityType the entity type to check + * @return true if the external provider can be used to search for items of the + * specified type + */ + public default boolean supportsEntityType(String entityType) { + return true; + } + } diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java index 45855a74ad48..2e934462c9f1 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java @@ -15,7 +15,7 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.provider.AbstractExternalDataProvider; import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -27,7 +27,7 @@ * @author Andrea Bollini (andrea.bollini at 4science.it) * */ -public class LiveImportDataProvider implements ExternalDataProvider { +public class LiveImportDataProvider extends AbstractExternalDataProvider { /** * The {@link 
QuerySource} live import provider */ @@ -57,7 +57,7 @@ public void setSourceIdentifier(String sourceIdentifier) { /** * This method set the MetadataSource for the ExternalDataProvider - * @param metadataSource {@link org.dspace.importer.external.service.components.MetadataSource} implementation used to process the input data + * @param querySource Source {@link org.dspace.importer.external.service.components.QuerySource} implementation used to process the input data */ public void setMetadataSource(QuerySource querySource) { this.querySource = querySource; diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenaireFundingDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenaireFundingDataProvider.java new file mode 100644 index 000000000000..62cef508c556 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OpenaireFundingDataProvider.java @@ -0,0 +1,403 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import eu.openaire.jaxb.helper.FundingHelper; +import eu.openaire.jaxb.helper.ProjectHelper; +import eu.openaire.jaxb.model.Response; +import eu.openaire.jaxb.model.Result; +import eu.openaire.oaf.model.base.FunderType; +import eu.openaire.oaf.model.base.FundingTreeType; +import eu.openaire.oaf.model.base.FundingType; +import eu.openaire.oaf.model.base.Project; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.content.dto.MetadataValueDTO; +import org.dspace.external.OpenaireRestConnector; +import org.dspace.external.model.ExternalDataObject; +import org.dspace.external.provider.AbstractExternalDataProvider; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * This class is the implementation of the ExternalDataProvider interface that + * will deal with the Openaire External Data lookup + * + * @author paulo-graca + */ +public class OpenaireFundingDataProvider extends AbstractExternalDataProvider { + + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OpenaireFundingDataProvider.class); + + /** + * GrantAgreement prefix + */ + protected static final String PREFIX = "info:eu-repo/grantAgreement"; + + private static final String TITLE = "dcTitle"; + private static final String SUBJECT = "dcSubject"; + private static final String AWARD_URI = "awardURI"; + private static final String FUNDER_NAME = "funderName"; + private static final String SPATIAL = "coverageSpatial"; + private static final String AWARD_NUMBER = "awardNumber"; + private static final String FUNDER_ID = "funderIdentifier"; + private static final String FUNDING_STREAM = "fundingStream"; + private static final String TITLE_ALTERNATIVE = "titleAlternative"; + + /** + * rows default limit + */ + protected static final int LIMIT_DEFAULT = 10; + + /** + * Source identifier (defined in beans) + */ + protected String sourceIdentifier; + + /** + * Connector to handle token and requests + */ + protected OpenaireRestConnector connector; + + protected Map metadataFields; + + public void init() throws IOException {} + + @Override + public String getSourceIdentifier() { + return sourceIdentifier; + } + + @Override + public Optional getExternalDataObject(String id) { + + // we use base64 encoding in order to 
use slashes / and other + // characters that must be escaped for the <:entry-id> + String decodedId = new String(Base64.getDecoder().decode(id)); + if (!isValidProjectURI(decodedId)) { + log.error("Invalid ID for OpenaireFunding - " + id); + return Optional.empty(); + } + Response response = searchByProjectURI(decodedId); + + try { + if (response.getHeader() != null && Integer.parseInt(response.getHeader().getTotal()) > 0) { + Project project = response.getResults().getResult().get(0).getMetadata().getEntity().getProject(); + ExternalDataObject externalDataObject = new OpenaireFundingDataProvider + .ExternalDataObjectBuilder(project) + .setId(generateProjectURI(project)) + .setSource(sourceIdentifier) + .build(); + return Optional.of(externalDataObject); + } + } catch (NumberFormatException e) { + log.error("Invalid Total from response - " + e.getMessage()); + } + + return Optional.empty(); + } + + @Override + public List searchExternalDataObjects(String query, int start, int limit) { + + // ensure we have a positive > 0 limit + if (limit < 1) { + limit = LIMIT_DEFAULT; + } + + // Openaire uses pages and first page starts with 1 + int page = (start / limit) + 1; + + // escaping query + String encodedQuery = encodeValue(query); + + Response projectResponse = connector.searchProjectByKeywords(page, limit, encodedQuery); + + if (projectResponse == null || projectResponse.getResults() == null) { + return Collections.emptyList(); + } + + List projects = new ArrayList(); + for (Result result : projectResponse.getResults().getResult()) { + + if (result.getMetadata() != null && result.getMetadata().getEntity() != null + && result.getMetadata().getEntity().getProject() != null) { + projects.add(result.getMetadata().getEntity().getProject()); + } else { + throw new IllegalStateException("No project found"); + } + } + + if (projects.size() > 0) { + return projects.stream() + .map(project -> new OpenaireFundingDataProvider + .ExternalDataObjectBuilder(project) + 
.setId(generateProjectURI(project)) + .setSource(sourceIdentifier) + .build()) + .collect(Collectors.toList()); + } + return Collections.emptyList(); + } + + @Override + public boolean supports(String source) { + return StringUtils.equalsIgnoreCase(sourceIdentifier, source); + } + + @Override + public int getNumberOfResults(String query) { + // escaping query + String encodedQuery = encodeValue(query); + + Response projectResponse = connector.searchProjectByKeywords(0, 0, encodedQuery); + return Integer.parseInt(projectResponse.getHeader().getTotal()); + } + + /** + * Generic setter for the sourceIdentifier + * + * @param sourceIdentifier The sourceIdentifier to be set on this + * OpenaireFunderDataProvider + */ + @Autowired(required = true) + public void setSourceIdentifier(String sourceIdentifier) { + this.sourceIdentifier = sourceIdentifier; + } + + public OpenaireRestConnector getConnector() { + return connector; + } + + /** + * Generic setter for OpenaireRestConnector + * + * @param connector + */ + @Autowired(required = true) + public void setConnector(OpenaireRestConnector connector) { + this.connector = connector; + } + + /** + * + * @param projectURI from type + * info:eu-repo/grantAgreement/FCT/3599-PPCDT/82130/PT + * @return Response + */ + public Response searchByProjectURI(String projectURI) { + String[] splittedURI = projectURI.replaceAll(PREFIX, "").split("/"); + return connector.searchProjectByIDAndFunder(splittedURI[3], splittedURI[1], 1, 1); + } + + /** + * Validates if the project has the correct format + * + * @param projectURI + * @return true if the URI is valid + */ + private static boolean isValidProjectURI(String projectURI) { + return Pattern.matches(PREFIX + "/.+/.+/.*", projectURI); + } + + /** + * This method returns an URI based on Openaire 3.0 guidelines + * https://guidelines.openaire.eu/en/latest/literature/field_projectid.html that + * can be used as an ID if is there any missing part, that part it will be + * replaced by the 
character '+' + * + * @param project + * @return String with an URI like: info:eu-repo/grantAgreement/EC/FP7/244909 + */ + private static String generateProjectURI(Project project) { + ProjectHelper projectHelper = new ProjectHelper(project.getCodeOrTitleOrAcronym()); + + String prefix = PREFIX; + String funderShortName = "+"; + String fundingName = "+"; + String code = "+"; + String jurisdiction = "+"; + + Optional fundingTree = projectHelper.getFundingTreeTypes().stream().findFirst(); + if (!fundingTree.isEmpty()) { + if (fundingTree.get().getFunder() != null) { + if (fundingTree.get().getFunder().getShortname() != null) { + funderShortName = encodeValue(fundingTree.get().getFunder().getShortname()); + } + if (fundingTree.get().getFunder().getJurisdiction() != null) { + jurisdiction = encodeValue(fundingTree.get().getFunder().getJurisdiction()); + } + } + FundingHelper fundingHelper = new FundingHelper( + fundingTree.get().getFundingLevel2OrFundingLevel1OrFundingLevel0()); + Optional funding = fundingHelper.getFirstAvailableFunding().stream().findFirst(); + + if (!funding.isEmpty()) { + fundingName = encodeValue(funding.get().getName()); + } + + } + + Optional optCode = projectHelper.getCodes().stream().findFirst(); + if (!optCode.isEmpty()) { + code = encodeValue(optCode.get()); + } + + return String.format("%s/%s/%s/%s/%s", prefix, funderShortName, fundingName, code, jurisdiction); + } + + private static String encodeValue(String value) { + try { + return URLEncoder.encode(value, StandardCharsets.UTF_8.toString()); + } catch (UnsupportedEncodingException e) { + return value; + } + } + + public Map getMetadataFields() { + return metadataFields; + } + + public void setMetadataFields(Map metadataFields) { + this.metadataFields = metadataFields; + } + + /** + * Openaire Funding External Data Builder Class + * + * @author pgraca + */ + public class ExternalDataObjectBuilder { + + private ExternalDataObject externalDataObject; + + public 
ExternalDataObjectBuilder(Project project) { + String funderIdPrefix = "urn:openaire:"; + this.externalDataObject = new ExternalDataObject(); + + ProjectHelper projectHelper = new ProjectHelper(project.getCodeOrTitleOrAcronym()); + for (FundingTreeType fundingTree : projectHelper.getFundingTreeTypes()) { + FunderType funder = fundingTree.getFunder(); + // Funder name + this.addMetadata(metadataFields.get(FUNDER_NAME), funder.getName()); + // Funder Id - convert it to an urn + this.addMetadata(metadataFields.get(FUNDER_ID), funderIdPrefix + funder.getId()); + // Jurisdiction + this.addMetadata(metadataFields.get(SPATIAL), funder.getJurisdiction()); + + FundingHelper fundingHelper = new FundingHelper( + fundingTree.getFundingLevel2OrFundingLevel1OrFundingLevel0()); + + // Funding description + for (FundingType funding : fundingHelper.getFirstAvailableFunding()) { + this.addMetadata(metadataFields.get(FUNDING_STREAM), funding.getDescription()); + } + } + + // Title + for (String title : projectHelper.getTitles()) { + this.addMetadata(metadataFields.get(TITLE), title); + this.setDisplayValue(title); + this.setValue(title); + } + // Code + for (String code : projectHelper.getCodes()) { + this.addMetadata(metadataFields.get(AWARD_NUMBER), code); + } + // Website url + for (String url : projectHelper.getWebsiteUrls()) { + this.addMetadata(metadataFields.get(AWARD_URI), url); + } + // Acronyms + for (String acronym : projectHelper.getAcronyms()) { + this.addMetadata(metadataFields.get(TITLE_ALTERNATIVE), acronym); + } + // Keywords + for (String keyword : projectHelper.getKeywords()) { + this.addMetadata(metadataFields.get(SUBJECT), keyword); + } + } + + /** + * Set the external data source + * + * @param source + * @return ExternalDataObjectBuilder + */ + public ExternalDataObjectBuilder setSource(String source) { + this.externalDataObject.setSource(source); + return this; + } + + /** + * Set the external data display name + * + * @param displayName + * @return 
ExternalDataObjectBuilder + */ + public ExternalDataObjectBuilder setDisplayValue(String displayName) { + this.externalDataObject.setDisplayValue(displayName); + return this; + } + + /** + * Set the external data value + * + * @param value + * @return ExternalDataObjectBuilder + */ + public ExternalDataObjectBuilder setValue(String value) { + this.externalDataObject.setValue(value); + return this; + } + + /** + * Set the external data id + * + * @param id + * @return ExternalDataObjectBuilder + */ + public ExternalDataObjectBuilder setId(String id) { + // we use base64 encoding in order to use slashes / and other + // characters that must be escaped for the <:entry-id> + String base64Id = Base64.getEncoder().encodeToString(id.getBytes()); + this.externalDataObject.setId(base64Id); + return this; + } + + public ExternalDataObjectBuilder addMetadata(MetadataFieldConfig metadataField, String value) { + this.externalDataObject.addMetadata(new MetadataValueDTO(metadataField.getSchema(), + metadataField.getElement(), + metadataField.getQualifier(), null, value)); + return this; + } + + /** + * Build the External Data + * + * @return ExternalDataObject + */ + public ExternalDataObject build() { + return this.externalDataObject; + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidPublicationDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidPublicationDataProvider.java new file mode 100644 index 000000000000..4fdf15a8a3ad --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidPublicationDataProvider.java @@ -0,0 +1,547 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import static java.util.Collections.emptyList; +import 
static java.util.Comparator.comparing; +import static java.util.Comparator.reverseOrder; +import static java.util.Optional.ofNullable; +import static org.apache.commons.collections4.ListUtils.partition; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.orcid.jaxb.model.common.CitationType.FORMATTED_UNSPECIFIED; + +import java.io.File; +import java.io.FileOutputStream; +import java.nio.charset.Charset; +import java.util.List; +import java.util.Optional; +import java.util.function.Supplier; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.math.NumberUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.dto.MetadataValueDTO; +import org.dspace.core.Context; +import org.dspace.external.model.ExternalDataObject; +import org.dspace.external.provider.AbstractExternalDataProvider; +import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.ImportService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.orcid.model.OrcidWorkFieldMapping; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.web.ContextUtil; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.ContributorAttributes; +import 
org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Subtitle; +import org.orcid.jaxb.model.v3.release.common.Title; +import org.orcid.jaxb.model.v3.release.record.Citation; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.SourceAware; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.WorkContributors; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; +import org.orcid.jaxb.model.v3.release.record.summary.WorkGroup; +import org.orcid.jaxb.model.v3.release.record.summary.WorkSummary; +import org.orcid.jaxb.model.v3.release.record.summary.Works; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link ExternalDataProvider} that searches for all the works + * of the profile with the given orcid id that have a source other than DSpace. + * The id of the external data objects returned by the methods of this class is + * the concatenation of the orcid id and the put code associated with the + * publication, separated by :: (example 0000-0000-0123-4567::123456) + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidPublicationDataProvider extends AbstractExternalDataProvider { + + private final static Logger LOGGER = LoggerFactory.getLogger(OrcidPublicationDataProvider.class); + + /** + * Examples of valid ORCID IDs: +

 + * <ul>
 + * <li>0000-0002-1825-0097</li>
 + * <li>0000-0001-5109-3700</li>
 + * <li>0000-0002-1694-233X</li>
 + * </ul>
    + */ + private final static Pattern ORCID_ID_PATTERN = Pattern.compile("(\\d{4}-){3}\\d{3}(\\d|X)"); + + private final static int MAX_PUT_CODES_SIZE = 100; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired + private ImportService importService; + + @Autowired + private OrcidTokenService orcidTokenService; + + private OrcidWorkFieldMapping fieldMapping; + + private String sourceIdentifier; + + private String readPublicAccessToken; + + @Override + public Optional getExternalDataObject(String id) { + + if (isInvalidIdentifier(id)) { + throw new IllegalArgumentException("Invalid identifier '" + id + "', expected ::"); + } + + String[] idSections = id.split("::"); + String orcid = idSections[0]; + String putCode = idSections[1]; + + validateOrcidId(orcid); + + return getWork(orcid, putCode) + .filter(work -> hasDifferentSourceClientId(work)) + .filter(work -> work.getPutCode() != null) + .map(work -> convertToExternalDataObject(orcid, work)); + } + + @Override + public List searchExternalDataObjects(String orcid, int start, int limit) { + + validateOrcidId(orcid); + + return findWorks(orcid, start, limit).stream() + .map(work -> convertToExternalDataObject(orcid, work)) + .collect(Collectors.toList()); + } + + private boolean isInvalidIdentifier(String id) { + return StringUtils.isBlank(id) || id.split("::").length != 2; + } + + private void validateOrcidId(String orcid) { + if (!ORCID_ID_PATTERN.matcher(orcid).matches()) { + throw new IllegalArgumentException("The given ORCID ID is not valid: " + orcid); + } + } + + /** + * Returns all the works related to the given ORCID in the range from start and + * limit. 
+ * + * @param orcid the ORCID ID of the author to search for works + * @param start the start index + * @param limit the limit index + * @return the list of the works + */ + private List findWorks(String orcid, int start, int limit) { + List workSummaries = findWorkSummaries(orcid, start, limit); + return findWorks(orcid, workSummaries); + } + + /** + * Returns all the works summaries related to the given ORCID in the range from + * start and limit. + * + * @param orcid the ORCID ID of the author to search for works summaries + * @param start the start index + * @param limit the limit index + * @return the list of the works summaries + */ + private List findWorkSummaries(String orcid, int start, int limit) { + return getWorks(orcid).getWorkGroup().stream() + .filter(workGroup -> allWorkSummariesHaveDifferentSourceClientId(workGroup)) + .map(workGroup -> getPreferredWorkSummary(workGroup)) + .flatMap(Optional::stream) + .skip(start) + .limit(limit > 0 ? limit : Long.MAX_VALUE) + .collect(Collectors.toList()); + } + + /** + * Returns all the works related to the given ORCID ID and work summaries (a + * work has more details than a work summary). + * + * @param orcid the ORCID id of the author to search for works + * @param workSummaries the work summaries used to search the related works + * @return the list of the works + */ + private List findWorks(String orcid, List workSummaries) { + + List workPutCodes = getPutCodes(workSummaries); + + if (CollectionUtils.isEmpty(workPutCodes)) { + return emptyList(); + } + + if (workPutCodes.size() == 1) { + return getWork(orcid, workPutCodes.get(0)).stream().collect(Collectors.toList()); + } + + return partition(workPutCodes, MAX_PUT_CODES_SIZE).stream() + .map(putCodes -> getWorkBulk(orcid, putCodes)) + .flatMap(workBulk -> getWorks(workBulk).stream()) + .collect(Collectors.toList()); + } + + /** + * Search a work by ORCID id and putcode, using API or PUBLIC urls based on + * whether the ORCID API keys are configured or not. 
+ * + * @param orcid the ORCID ID + * @param putCode the work's identifier on ORCID + * @return the work, if any + */ + private Optional getWork(String orcid, String putCode) { + if (orcidConfiguration.isApiConfigured()) { + String accessToken = getAccessToken(orcid); + return orcidClient.getObject(accessToken, orcid, putCode, Work.class); + } else { + return orcidClient.getObject(orcid, putCode, Work.class); + } + } + + /** + * Returns all the works related to the given ORCID. + * + * @param orcid the ORCID ID of the author to search for works + * @return the list of the works + */ + private Works getWorks(String orcid) { + if (orcidConfiguration.isApiConfigured()) { + String accessToken = getAccessToken(orcid); + return orcidClient.getWorks(accessToken, orcid); + } else { + return orcidClient.getWorks(orcid); + } + } + + /** + * Returns all the works related to the given ORCID by the given putCodes. + * + * @param orcid the ORCID ID of the author to search for works + * @param putCodes the work's put codes to search + * @return the list of the works + */ + private WorkBulk getWorkBulk(String orcid, List putCodes) { + if (orcidConfiguration.isApiConfigured()) { + String accessToken = getAccessToken(orcid); + return orcidClient.getWorkBulk(accessToken, orcid, putCodes); + } else { + return orcidClient.getWorkBulk(orcid, putCodes); + } + } + + private String getAccessToken(String orcid) { + List items = orcidSynchronizationService.findProfilesByOrcid(new Context(), orcid); + return Optional.ofNullable(items.isEmpty() ? 
null : items.get(0)) + .flatMap(item -> getAccessToken(item)) + .orElseGet(() -> getReadPublicAccessToken()); + } + + private Optional getAccessToken(Item item) { + return ofNullable(orcidTokenService.findByProfileItem(getContext(), item)) + .map(OrcidToken::getAccessToken); + } + + private String getReadPublicAccessToken() { + if (readPublicAccessToken != null) { + return readPublicAccessToken; + } + + OrcidTokenResponseDTO accessTokenResponse = orcidClient.getReadPublicAccessToken(); + readPublicAccessToken = accessTokenResponse.getAccessToken(); + + return readPublicAccessToken; + } + + private List getWorks(WorkBulk workBulk) { + return workBulk.getBulk().stream() + .filter(bulkElement -> (bulkElement instanceof Work)) + .map(bulkElement -> ((Work) bulkElement)) + .collect(Collectors.toList()); + + } + + private List getPutCodes(List workSummaries) { + return workSummaries.stream() + .map(WorkSummary::getPutCode) + .map(String::valueOf) + .collect(Collectors.toList()); + } + + private Optional getPreferredWorkSummary(WorkGroup workGroup) { + return workGroup.getWorkSummary().stream() + .filter(work -> work.getPutCode() != null) + .filter(work -> NumberUtils.isCreatable(work.getDisplayIndex())) + .sorted(comparing(work -> Integer.valueOf(work.getDisplayIndex()), reverseOrder())) + .findFirst(); + } + + private ExternalDataObject convertToExternalDataObject(String orcid, Work work) { + ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier); + externalDataObject.setId(orcid + "::" + work.getPutCode().toString()); + + String title = getWorkTitle(work); + externalDataObject.setDisplayValue(title); + externalDataObject.setValue(title); + + addMetadataValue(externalDataObject, fieldMapping.getTitleField(), () -> title); + addMetadataValue(externalDataObject, fieldMapping.getTypeField(), () -> getWorkType(work)); + addMetadataValue(externalDataObject, fieldMapping.getPublicationDateField(), () -> getPublicationDate(work)); + 
addMetadataValue(externalDataObject, fieldMapping.getJournalTitleField(), () -> getJournalTitle(work)); + addMetadataValue(externalDataObject, fieldMapping.getSubTitleField(), () -> getSubTitleField(work)); + addMetadataValue(externalDataObject, fieldMapping.getShortDescriptionField(), () -> getDescription(work)); + addMetadataValue(externalDataObject, fieldMapping.getLanguageField(), () -> getLanguage(work)); + + for (String contributorField : fieldMapping.getContributorFields().keySet()) { + ContributorRole role = fieldMapping.getContributorFields().get(contributorField); + addMetadataValues(externalDataObject, contributorField, () -> getContributors(work, role)); + } + + for (String externalIdField : fieldMapping.getExternalIdentifierFields().keySet()) { + String type = fieldMapping.getExternalIdentifierFields().get(externalIdField); + addMetadataValues(externalDataObject, externalIdField, () -> getExternalIds(work, type)); + } + + try { + addMetadataValuesFromCitation(externalDataObject, work.getWorkCitation()); + } catch (Exception e) { + LOGGER.error("An error occurs reading the following citation: " + work.getWorkCitation().getCitation(), e); + } + + return externalDataObject; + } + + private boolean allWorkSummariesHaveDifferentSourceClientId(WorkGroup workGroup) { + return workGroup.getWorkSummary().stream().allMatch(this::hasDifferentSourceClientId); + } + + @SuppressWarnings("deprecation") + private boolean hasDifferentSourceClientId(SourceAware sourceAware) { + return Optional.ofNullable(sourceAware.getSource()) + .map(source -> source.getSourceClientId()) + .map(sourceClientId -> sourceClientId.getPath()) + .map(clientId -> !StringUtils.equals(orcidConfiguration.getClientId(), clientId)) + .orElse(true); + } + + private void addMetadataValues(ExternalDataObject externalData, String metadata, Supplier> values) { + + if (StringUtils.isBlank(metadata)) { + return; + } + + MetadataFieldName field = new MetadataFieldName(metadata); + for (String value : 
values.get()) { + externalData.addMetadata(new MetadataValueDTO(field.schema, field.element, field.qualifier, null, value)); + } + } + + private void addMetadataValue(ExternalDataObject externalData, String metadata, Supplier valueSupplier) { + addMetadataValues(externalData, metadata, () -> { + String value = valueSupplier.get(); + return isNotBlank(value) ? List.of(value) : emptyList(); + }); + } + + private String getWorkTitle(Work work) { + WorkTitle workTitle = work.getWorkTitle(); + if (workTitle == null) { + return null; + } + Title title = workTitle.getTitle(); + return title != null ? title.getContent() : null; + } + + private String getWorkType(Work work) { + WorkType workType = work.getWorkType(); + return workType != null ? fieldMapping.convertType(workType.value()) : null; + } + + private String getPublicationDate(Work work) { + PublicationDate publicationDate = work.getPublicationDate(); + if (publicationDate == null) { + return null; + } + + StringBuilder builder = new StringBuilder(publicationDate.getYear().getValue()); + if (publicationDate.getMonth() != null) { + builder.append("-"); + builder.append(publicationDate.getMonth().getValue()); + } + + if (publicationDate.getDay() != null) { + builder.append("-"); + builder.append(publicationDate.getDay().getValue()); + } + + return builder.toString(); + } + + private String getJournalTitle(Work work) { + Title journalTitle = work.getJournalTitle(); + return journalTitle != null ? journalTitle.getContent() : null; + } + + private String getSubTitleField(Work work) { + WorkTitle workTitle = work.getWorkTitle(); + if (workTitle == null) { + return null; + } + Subtitle subTitle = workTitle.getSubtitle(); + return subTitle != null ? subTitle.getContent() : null; + } + + private String getDescription(Work work) { + return work.getShortDescription(); + } + + private String getLanguage(Work work) { + return work.getLanguageCode() != null ? 
fieldMapping.convertLanguage(work.getLanguageCode()) : null; + } + + private List getContributors(Work work, ContributorRole role) { + WorkContributors workContributors = work.getWorkContributors(); + if (workContributors == null) { + return emptyList(); + } + + return workContributors.getContributor().stream() + .filter(contributor -> hasRole(contributor, role)) + .map(contributor -> getContributorName(contributor)) + .flatMap(Optional::stream) + .collect(Collectors.toList()); + } + + private void addMetadataValuesFromCitation(ExternalDataObject externalDataObject, Citation citation) + throws Exception { + + if (citation == null || citation.getWorkCitationType() == FORMATTED_UNSPECIFIED) { + return; + } + + getImportRecord(citation).ifPresent(importRecord -> enrichExternalDataObject(externalDataObject, importRecord)); + + } + + private Optional getImportRecord(Citation citation) throws Exception { + File citationFile = File.createTempFile("temp", "." + citation.getWorkCitationType().value()); + try (FileOutputStream outputStream = new FileOutputStream(citationFile)) { + IOUtils.write(citation.getCitation(), new FileOutputStream(citationFile), Charset.defaultCharset()); + return Optional.ofNullable(importService.getRecord(citationFile, citationFile.getName())); + } finally { + citationFile.delete(); + } + } + + private void enrichExternalDataObject(ExternalDataObject externalDataObject, ImportRecord importRecord) { + importRecord.getValueList().stream() + .filter(metadata -> doesNotContains(externalDataObject, metadata)) + .forEach(metadata -> addMetadata(externalDataObject, metadata)); + } + + private void addMetadata(ExternalDataObject externalDataObject, MetadatumDTO metadata) { + externalDataObject.addMetadata(new MetadataValueDTO(metadata.getSchema(), metadata.getElement(), + metadata.getQualifier(), null, metadata.getValue())); + } + + private boolean doesNotContains(ExternalDataObject externalDataObject, MetadatumDTO metadata) { + return 
externalDataObject.getMetadata().stream() + .filter(metadataValue -> StringUtils.equals(metadataValue.getSchema(), metadata.getSchema())) + .filter(metadataValue -> StringUtils.equals(metadataValue.getElement(), metadata.getElement())) + .filter(metadataValue -> StringUtils.equals(metadataValue.getQualifier(), metadata.getQualifier())) + .findAny().isEmpty(); + } + + private boolean hasRole(Contributor contributor, ContributorRole role) { + ContributorAttributes attributes = contributor.getContributorAttributes(); + return attributes != null ? role.equals(attributes.getContributorRole()) : false; + } + + private Optional getContributorName(Contributor contributor) { + return Optional.ofNullable(contributor.getCreditName()) + .map(creditName -> creditName.getContent()); + } + + private List getExternalIds(Work work, String type) { + ExternalIDs externalIdentifiers = work.getExternalIdentifiers(); + if (externalIdentifiers == null) { + return emptyList(); + } + + return externalIdentifiers.getExternalIdentifier().stream() + .filter(externalId -> type.equals(externalId.getType())) + .map(externalId -> externalId.getValue()) + .collect(Collectors.toList()); + } + + private Context getContext() { + Context context = ContextUtil.obtainCurrentRequestContext(); + return context != null ? 
context : new Context(); + } + + @Override + public boolean supports(String source) { + return StringUtils.equals(sourceIdentifier, source); + } + + @Override + public int getNumberOfResults(String orcid) { + return findWorkSummaries(orcid, 0, -1).size(); + } + + public void setSourceIdentifier(String sourceIdentifier) { + this.sourceIdentifier = sourceIdentifier; + } + + @Override + public String getSourceIdentifier() { + return sourceIdentifier; + } + + public void setFieldMapping(OrcidWorkFieldMapping fieldMapping) { + this.fieldMapping = fieldMapping; + } + + public void setReadPublicAccessToken(String readPublicAccessToken) { + this.readPublicAccessToken = readPublicAccessToken; + } + + public OrcidClient getOrcidClient() { + return orcidClient; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java index 629ff3829d53..125da8f7c67b 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/OrcidV3AuthorDataProvider.java @@ -16,6 +16,7 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -29,7 +30,7 @@ import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.OrcidRestConnector; import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.provider.AbstractExternalDataProvider; import org.dspace.external.provider.orcid.xml.XMLtoBio; import org.json.JSONObject; import org.orcid.jaxb.model.v3.release.common.OrcidIdentifier; @@ -41,7 +42,7 @@ * This class is the implementation of the 
ExternalDataProvider interface that will deal with the OrcidV3 External * Data lookup */ -public class OrcidV3AuthorDataProvider implements ExternalDataProvider { +public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider { private static final Logger log = LogManager.getLogger(OrcidV3AuthorDataProvider.class); @@ -139,7 +140,7 @@ protected ExternalDataObject convertToExternalDataObject(Person person) { new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath())); externalDataObject .addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null, - orcidUrl + person.getName().getPath())); + orcidUrl + "/" + person.getName().getPath())); if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) { externalDataObject.setDisplayValue(lastName + ", " + firstName); externalDataObject.setValue(lastName + ", " + firstName); @@ -217,11 +218,10 @@ public List searchExternalDataObjects(String query, int star } catch (IOException e) { log.error(e.getMessage(), e); } - if (bios == null) { + if (Objects.isNull(bios)) { return Collections.emptyList(); - } else { - return bios.stream().map(bio -> convertToExternalDataObject(bio)).collect(Collectors.toList()); } + return bios.stream().map(bio -> convertToExternalDataObject(bio)).collect(Collectors.toList()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalDataProvider.java index 42d3cab494ec..a4276c83ed70 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalDataProvider.java @@ -15,14 +15,13 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; import org.dspace.app.sherpa.SHERPAService; import 
org.dspace.app.sherpa.v2.SHERPAJournal; import org.dspace.app.sherpa.v2.SHERPAResponse; import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.provider.AbstractExternalDataProvider; /** * This class is the implementation of the ExternalDataProvider interface that will deal with SherpaJournal External @@ -31,9 +30,7 @@ * * @author Kim Shepherd */ -public class SHERPAv2JournalDataProvider implements ExternalDataProvider { - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAv2JournalDataProvider.class); +public class SHERPAv2JournalDataProvider extends AbstractExternalDataProvider { // Source identifier (configured in spring configuration) private String sourceIdentifier; diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalISSNDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalISSNDataProvider.java index ed2df53c838e..9e61b9ac2ac0 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalISSNDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalISSNDataProvider.java @@ -22,7 +22,7 @@ import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.provider.AbstractExternalDataProvider; /** * This class is the implementation of the ExternalDataProvider interface that will deal with SherpaJournal External @@ -32,7 +32,7 @@ * * @author Kim Shepherd */ -public class SHERPAv2JournalISSNDataProvider implements ExternalDataProvider { +public class SHERPAv2JournalISSNDataProvider extends AbstractExternalDataProvider { private static final Logger log = 
org.apache.logging.log4j.LogManager.getLogger( diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2PublisherDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2PublisherDataProvider.java index 03f172853e0e..af922220ce54 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2PublisherDataProvider.java +++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2PublisherDataProvider.java @@ -15,14 +15,13 @@ import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; import org.dspace.app.sherpa.SHERPAService; import org.dspace.app.sherpa.v2.SHERPAPublisher; import org.dspace.app.sherpa.v2.SHERPAPublisherResponse; import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.provider.AbstractExternalDataProvider; /** * This class is the implementation of the ExternalDataProvider interface that will deal with SHERPAPublisher External @@ -33,11 +32,7 @@ * * @author Kim Shepherd */ -public class SHERPAv2PublisherDataProvider implements ExternalDataProvider { - - // Logger - private static final Logger log = - org.apache.logging.log4j.LogManager.getLogger(SHERPAv2PublisherDataProvider.class); +public class SHERPAv2PublisherDataProvider extends AbstractExternalDataProvider { // Source identifier (eg 'sherpaPublisher') configured in spring configuration private String sourceIdentifier; diff --git a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java index 582c49d4077e..756b8654f285 100644 --- a/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java +++ 
b/dspace-api/src/main/java/org/dspace/external/provider/orcid/xml/Converter.java @@ -12,8 +12,10 @@ import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; -import org.apache.logging.log4j.Logger; import org.xml.sax.SAXException; /** @@ -25,20 +27,20 @@ */ public abstract class Converter { - /** - * log4j logger - */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Converter.class); - public abstract T convert(InputStream document); protected Object unmarshall(InputStream input, Class type) throws SAXException, URISyntaxException { try { + XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory(); + // disallow DTD parsing to ensure no XXE attacks can occur + xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); + XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(input); + JAXBContext context = JAXBContext.newInstance(type); Unmarshaller unmarshaller = context.createUnmarshaller(); - return unmarshaller.unmarshal(input); - } catch (JAXBException e) { - throw new RuntimeException("Unable to unmarshall orcid message" + e); + return unmarshaller.unmarshal(xmlStreamReader); + } catch (JAXBException | XMLStreamException e) { + throw new RuntimeException("Unable to unmarshall orcid message: " + e); } } } diff --git a/dspace-api/src/main/java/org/dspace/external/service/ExternalDataService.java b/dspace-api/src/main/java/org/dspace/external/service/ExternalDataService.java index e0c241ba4a53..53423395e3fa 100644 --- a/dspace-api/src/main/java/org/dspace/external/service/ExternalDataService.java +++ b/dspace-api/src/main/java/org/dspace/external/service/ExternalDataService.java @@ -77,4 +77,25 @@ public interface ExternalDataService { WorkspaceItem createWorkspaceItemFromExternalDataObject(Context context, ExternalDataObject 
externalDataObject, Collection collection) throws AuthorizeException, SQLException; + + /** + * Return the ExternalDataProvider that supports a specific entity type + * + * @param entityType + * @return list of ExternalDataProviders that supports a specific entity type + */ + public List getExternalDataProvidersForEntityType(String entityType); + + /** + * Override this method to limit the external data provider to specific entity + * types (Publication, OrgUnit, etc.) + * + * @param entityType the entity type to check + * @return true if the external provider can be used to search for items of the + * specified type + */ + public default boolean supportsEntityType(String entityType) { + return true; + } + } diff --git a/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java b/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java index 290345aff103..f91ea00cac4a 100644 --- a/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/external/service/impl/ExternalDataServiceImpl.java @@ -10,10 +10,10 @@ import java.sql.SQLException; import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Collection; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; @@ -21,7 +21,7 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.external.model.ExternalDataObject; import org.dspace.external.provider.ExternalDataProvider; import org.dspace.external.service.ExternalDataService; @@ -44,9 +44,6 @@ public class ExternalDataServiceImpl implements 
ExternalDataService { @Autowired private WorkspaceItemService workspaceItemService; - @Autowired - private AuthorizeService authorizeService; - @Override public Optional getExternalDataObject(String source, String id) { ExternalDataProvider provider = getExternalDataProvider(source); @@ -105,9 +102,17 @@ public WorkspaceItem createWorkspaceItemFromExternalDataObject(Context context, metadataValueDTO.getConfidence()); } - log.info(LogManager.getHeader(context, "create_item_from_externalDataObject", "Created item" + + log.info(LogHelper.getHeader(context, "create_item_from_externalDataObject", "Created item" + "with id: " + item.getID() + " from source: " + externalDataObject.getSource() + " with identifier: " + externalDataObject.getId())); return workspaceItem; } + + @Override + public List getExternalDataProvidersForEntityType(String entityType) { + return externalDataProviders.stream() + .filter(edp -> edp.supportsEntityType(entityType)) + .collect(Collectors.toList()); + } + } diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java b/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java new file mode 100644 index 000000000000..50da0e528353 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAnalyticsEvent.java @@ -0,0 +1,99 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google; + +import java.util.Objects; + +import org.springframework.util.Assert; + +/** + * This is a dataholder class for an individual event to be sent to Google Analaytics + * + * @author April Herron + */ +public final class GoogleAnalyticsEvent { + + private final String clientId; + private final String userIp; + private final String userAgent; + private final String documentReferrer; + private final String documentPath; + private final 
String documentTitle; + private final long time; + + public GoogleAnalyticsEvent(String clientId, String userIp, String userAgent, String documentReferrer, + String documentPath, String documentTitle) { + Assert.notNull(clientId, "A client id is required to create a Google Analytics event"); + this.clientId = clientId; + this.userIp = userIp; + this.userAgent = userAgent; + this.documentReferrer = documentReferrer; + this.documentPath = documentPath; + this.documentTitle = documentTitle; + this.time = System.currentTimeMillis(); + } + + public String getClientId() { + return clientId; + } + + public String getUserIp() { + return userIp; + } + + public String getUserAgent() { + return userAgent != null ? userAgent : ""; + } + + public String getDocumentReferrer() { + return documentReferrer != null ? documentReferrer : ""; + } + + public String getDocumentPath() { + return documentPath; + } + + public String getDocumentTitle() { + return documentTitle; + } + + public long getTime() { + return time; + } + + @Override + public int hashCode() { + return Objects.hash(clientId, documentPath, documentReferrer, documentTitle, time, userAgent, userIp); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GoogleAnalyticsEvent other = (GoogleAnalyticsEvent) obj; + return Objects.equals(clientId, other.clientId) && Objects.equals(documentPath, other.documentPath) + && Objects.equals(documentReferrer, other.documentReferrer) + && Objects.equals(documentTitle, other.documentTitle) && time == other.time + && Objects.equals(userAgent, other.userAgent) && Objects.equals(userIp, other.userIp); + } + + @Override + public String toString() { + return "GoogleAnalyticsEvent [clientId=" + clientId + ", userIp=" + userIp + ", userAgent=" + userAgent + + ", documentReferrer=" + documentReferrer + ", documentPath=" + documentPath + ", 
documentTitle=" + + documentTitle + ", time=" + time + "]"; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java new file mode 100644 index 000000000000..c1c59acf4a63 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/GoogleAsyncEventListener.java @@ -0,0 +1,320 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.collections.Buffer; +import org.apache.commons.collections.BufferUtils; +import org.apache.commons.collections.buffer.CircularFifoBuffer; +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.google.client.GoogleAnalyticsClient; +import org.dspace.service.ClientInfoService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.model.Event; +import org.dspace.usage.AbstractUsageEventListener; +import org.dspace.usage.UsageEvent; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Notifies Google Analytics of Bitstream VIEW events. These events are stored in memory and then + * asynchronously processed by a single seperate thread. 
+ * + * @author April Herron + * @author Luca Giamminonni + */ +public class GoogleAsyncEventListener extends AbstractUsageEventListener { + + // 20 is the event max set by the GA API + public static final int GA_MAX_EVENTS = 20; + + private static final Logger LOGGER = LogManager.getLogger(); + + private static final int MAX_TIME_SINCE_EVENT = 14400000; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private ClientInfoService clientInfoService; + + @Autowired + private List googleAnalyticsClients; + + private Buffer eventsBuffer; + + @PostConstruct + public void init() { + int analyticsBufferlimit = configurationService.getIntProperty("google.analytics.buffer.limit", 256); + eventsBuffer = BufferUtils.synchronizedBuffer(new CircularFifoBuffer(analyticsBufferlimit)); + } + + @Override + @SuppressWarnings("unchecked") + public void receiveEvent(Event event) { + + if (!(event instanceof UsageEvent) || isGoogleAnalyticsKeyNotConfigured()) { + return; + } + + UsageEvent usageEvent = (UsageEvent) event; + LOGGER.debug("Usage event received " + event.getName()); + + if (!isContentBitstream(usageEvent)) { + return; + } + + try { + GoogleAnalyticsEvent analyticsEvent = createGoogleAnalyticsEvent(usageEvent); + eventsBuffer.add(analyticsEvent); + } catch (Exception e) { + logReceiveEventException(usageEvent, e); + } + + } + + /** + * Send the collected events to Google Analytics. 
+ */ + public void sendCollectedEvents() { + + if (isGoogleAnalyticsKeyNotConfigured()) { + return; + } + + String analyticsKey = getGoogleAnalyticsKey(); + + List events = getEventsFromBufferFilteredByEventTime(); + + if (events.isEmpty()) { + return; + } + + GoogleAnalyticsClient client = getClientByAnalyticsKey(analyticsKey); + + try { + client.sendEvents(analyticsKey, events); + } catch (RuntimeException ex) { + LOGGER.error("An error occurs sending the events.", ex); + } + + } + + /** + * Creates an instance of GoogleAnalyticsEvent from the given usage event. + * @param usageEvent the usage event + * @return the Google Analytics event instance + */ + private GoogleAnalyticsEvent createGoogleAnalyticsEvent(UsageEvent usageEvent) { + + HttpServletRequest request = usageEvent.getRequest(); + + String clientId = getClientId(usageEvent); + String referrer = getReferrer(usageEvent); + String clientIp = clientInfoService.getClientIp(request); + String userAgent = request.getHeader("USER-AGENT"); + String documentPath = getDocumentPath(request); + String documentName = getObjectName(usageEvent); + + return new GoogleAnalyticsEvent(clientId, clientIp, userAgent, referrer, + documentPath, documentName); + } + + /** + * Client ID, should uniquely identify the user or device. If we have an + * X-CORRELATION-ID header or a session ID for the user, then lets use it, + * othwerwise generate a UUID. + */ + private String getClientId(UsageEvent usageEvent) { + if (usageEvent.getRequest().getHeader("X-CORRELATION-ID") != null) { + return usageEvent.getRequest().getHeader("X-CORRELATION-ID"); + } else if (usageEvent.getRequest().getSession(false) != null) { + return usageEvent.getRequest().getSession().getId(); + } else { + return UUID.randomUUID().toString(); + } + } + + /** + * Prefer the X-REFERRER header, otherwise fallback to the referrer header. 
+ */ + private String getReferrer(UsageEvent usageEvent) { + if (usageEvent.getRequest().getHeader("X-REFERRER") != null) { + return usageEvent.getRequest().getHeader("X-REFERRER"); + } else { + return usageEvent.getRequest().getHeader("referer"); + } + } + + private String getDocumentPath(HttpServletRequest request) { + String documentPath = request.getRequestURI(); + if (StringUtils.isNotBlank(request.getQueryString())) { + documentPath += "?" + request.getQueryString(); + } + return documentPath; + } + + /** + * Verifies if the usage event is a content bitstream view event, by checking if:
      + *
     * <ul>
     * <li>the usage event is a view event</li>
     * <li>the object of the usage event is a bitstream</li>
     * <li>the bitstream belongs to one of the configured bundles (fallback: ORIGINAL bundle)</li>
     * </ul>
    + */ + private boolean isContentBitstream(UsageEvent usageEvent) { + // check if event is a VIEW event and object is a Bitstream + if (usageEvent.getAction() == UsageEvent.Action.VIEW + && usageEvent.getObject().getType() == Constants.BITSTREAM) { + // check if bitstream belongs to a configured bundle + List allowedBundles = List.of(configurationService + .getArrayProperty("google-analytics.bundles", new String[]{Constants.CONTENT_BUNDLE_NAME})); + if (allowedBundles.contains("none")) { + // GA events for bitstream views were turned off in config + return false; + } + List bitstreamBundles; + try { + bitstreamBundles = ((Bitstream) usageEvent.getObject()) + .getBundles().stream().map(Bundle::getName).collect(Collectors.toList()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + return allowedBundles.stream().anyMatch(bitstreamBundles::contains); + } + return false; + } + + private boolean isGoogleAnalyticsKeyNotConfigured() { + return StringUtils.isBlank(getGoogleAnalyticsKey()); + } + + private void logReceiveEventException(UsageEvent usageEvent, Exception e) { + + LOGGER.error("Failed to add event to buffer", e); + LOGGER.error("Event information: " + usageEvent); + + Context context = usageEvent.getContext(); + if (context == null) { + LOGGER.error("UsageEvent has no Context object"); + return; + } + + LOGGER.error("Context information:"); + LOGGER.error(" Current User: " + context.getCurrentUser()); + LOGGER.error(" Extra log info: " + context.getExtraLogInfo()); + if (context.getEvents() != null && !context.getEvents().isEmpty()) { + for (int x = 1; x <= context.getEvents().size(); x++) { + LOGGER.error(" Context Event " + x + ": " + context.getEvents().get(x)); + } + } + + } + + private String getObjectName(UsageEvent ue) { + try { + if (ue.getObject().getType() == Constants.BITSTREAM) { + // For a bitstream download we really want to know the title of the owning item + // rather than the bitstream name. 
+ return ContentServiceFactory.getInstance().getDSpaceObjectService(ue.getObject()) + .getParentObject(ue.getContext(), ue.getObject()).getName(); + } else { + return ue.getObject().getName(); + } + } catch (SQLException e) { + // This shouldn't merit interrupting the user's transaction so log the error and continue. + LOGGER.error("Error in Google Analytics recording - can't determine ParentObjectName for bitstream " + + ue.getObject().getID(), e); + } + + return null; + + } + + /** + * Returns the first GA_MAX_EVENTS stored in the eventsBuffer with a time minor + * that MAX_TIME_SINCE_EVENT. The found events are removed from the buffer. + * + * @return the events from the buffer + */ + private List getEventsFromBufferFilteredByEventTime() { + + List events = new ArrayList<>(); + + Iterator iterator = eventsBuffer.iterator(); + + while (iterator.hasNext() && events.size() < GA_MAX_EVENTS) { + + GoogleAnalyticsEvent event = (GoogleAnalyticsEvent) iterator.next(); + eventsBuffer.remove(event); + + if ((System.currentTimeMillis() - event.getTime()) < MAX_TIME_SINCE_EVENT) { + events.add(event); + } + + } + + return events; + } + + /** + * Returns the first instance of the GoogleAnalyticsClient that supports the + * given analytics key. + * + * @param analyticsKey the analytics key. 
+ * @return the found client + * @throws IllegalStateException if no client is found for the given analytics + * key + */ + private GoogleAnalyticsClient getClientByAnalyticsKey(String analyticsKey) { + + List clients = googleAnalyticsClients.stream() + .filter(client -> client.isAnalyticsKeySupported(analyticsKey)) + .collect(Collectors.toList()); + + if (clients.isEmpty()) { + throw new IllegalStateException("No Google Analytics Client supports key " + analyticsKey); + } + + if (clients.size() > 1) { + throw new IllegalStateException("More than one Google Analytics Client supports key " + analyticsKey); + } + + return clients.get(0); + + } + + private String getGoogleAnalyticsKey() { + return configurationService.getProperty("google.analytics.key"); + } + + public List getGoogleAnalyticsClients() { + return googleAnalyticsClients; + } + + public void setGoogleAnalyticsClients(List googleAnalyticsClients) { + this.googleAnalyticsClients = googleAnalyticsClients; + } + + public Buffer getEventsBuffer() { + return eventsBuffer; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java b/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java index ec86e5b410b3..4159661b1ced 100644 --- a/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java +++ b/dspace-api/src/main/java/org/dspace/google/GoogleRecorderEventListener.java @@ -40,7 +40,9 @@ * Time: 10:05 * * Notify Google Analytics of... well anything we want really. 
+ * @deprecated Use org.dspace.google.GoogleAsyncEventListener instead */ +@Deprecated public class GoogleRecorderEventListener extends AbstractUsageEventListener { private String analyticsKey; diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilder.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilder.java new file mode 100644 index 000000000000..85f48d610891 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilder.java @@ -0,0 +1,247 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static java.util.stream.Collectors.groupingBy; +import static org.apache.commons.lang.StringUtils.startsWith; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.dspace.google.GoogleAnalyticsEvent; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link GoogleAnalyticsClientRequestBuilder} that compose + * the request for Google Analytics 4 (GA4). 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalytics4ClientRequestBuilder implements GoogleAnalyticsClientRequestBuilder { + + private final String endpointUrl; + + @Autowired + private ConfigurationService configurationService; + + private ObjectMapper objectMapper = new ObjectMapper(); + + public GoogleAnalytics4ClientRequestBuilder(String endpointUrl) { + this.endpointUrl = endpointUrl; + } + + @Override + public String getEndpointUrl(String analyticsKey) { + + if (!startsWith(analyticsKey, "G-")) { + throw new IllegalArgumentException("Only keys with G- prefix are supported"); + } + + String apiSecret = configurationService.getProperty("google.analytics.api-secret"); + if (StringUtils.isBlank(apiSecret)) { + throw new GoogleAnalyticsClientException("The API secret must be configured to sent GA4 events"); + } + + return endpointUrl + "?api_secret=" + apiSecret + "&measurement_id=" + analyticsKey; + + } + + @Override + public List composeRequestsBody(String analyticsKey, List events) { + + Map> eventsGroupedByClientId = groupByClientId(events); + + List requestsBody = new ArrayList(); + + for (String clientId : eventsGroupedByClientId.keySet()) { + String requestBody = composeRequestBody(clientId, eventsGroupedByClientId.get(clientId)); + requestsBody.add(requestBody); + } + + return requestsBody; + + } + + private Map> groupByClientId(List events) { + return events.stream() + .collect(groupingBy(GoogleAnalyticsEvent::getClientId)); + } + + private String composeRequestBody(String clientId, List events) { + + GoogleAnalytics4EventsVO eventsVo = new GoogleAnalytics4EventsVO(clientId); + + events.stream() + .map(GoogleAnalytics4EventVO::fromGoogleAnalyticsEvent) + .forEach(eventsVo::addEvent); + + return toJsonAsString(eventsVo); + + } + + private String toJsonAsString(GoogleAnalytics4EventsVO eventsVo) { + try { + return objectMapper.writeValueAsString(eventsVo); + } catch (JsonProcessingException e) { + throw 
new GoogleAnalyticsClientException(e); + } + } + + public void setConfigurationService(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + + /** + * Class that models the json of the events to be write in the body of the GA request. + */ + public static class GoogleAnalytics4EventsVO { + + @JsonProperty("client_id") + private final String clientId; + + private final List events; + + public GoogleAnalytics4EventsVO(String clientId) { + this.clientId = clientId; + this.events = new ArrayList<>(); + } + + public String getClientId() { + return clientId; + } + + public List getEvents() { + return events; + } + + public void addEvent(GoogleAnalytics4EventVO event) { + this.events.add(event); + } + + } + + /** + * Class that model a single event to be sent to GA. + */ + public static class GoogleAnalytics4EventVO { + + private final String name = "item"; + + private final GoogleAnalytics4EventParamsVO params; + + public static GoogleAnalytics4EventVO fromGoogleAnalyticsEvent(GoogleAnalyticsEvent event) { + return new GoogleAnalytics4EventVO(event.getTime(), event.getDocumentTitle(), event.getDocumentPath(), + event.getDocumentReferrer(), event.getUserAgent(), event.getUserIp()); + } + + public GoogleAnalytics4EventVO(long time, String documentTitle, String documentPath, String documentReferrer, + String userAgent, String userIp) { + + this.params = new GoogleAnalytics4EventParamsVO(time, documentTitle, documentPath, + documentReferrer, userAgent, userIp); + } + + public String getName() { + return name; + } + + public GoogleAnalytics4EventParamsVO getParams() { + return params; + } + + } + + /** + * Class that model the params of a specific event to be sent to GA. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ + public static class GoogleAnalytics4EventParamsVO { + + private final String action = "download"; + + private final String category = "bitstream"; + + @JsonInclude(Include.NON_NULL) + private final long time; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("document_title") + private final String documentTitle; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("document_path") + private final String documentPath; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("document_referrer") + private final String documentReferrer; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("user_agent") + private final String userAgent; + + @JsonInclude(Include.NON_NULL) + @JsonProperty("user_ip") + private final String userIp; + + public GoogleAnalytics4EventParamsVO(long time, String documentTitle, String documentPath, + String documentReferrer, String userAgent, String userIp) { + this.time = time; + this.documentTitle = documentTitle; + this.documentPath = documentPath; + this.documentReferrer = documentReferrer; + this.userAgent = userAgent; + this.userIp = userIp; + } + + public long getTime() { + return time; + } + + public String getDocumentTitle() { + return documentTitle; + } + + public String getDocumentPath() { + return documentPath; + } + + public String getDocumentReferrer() { + return documentReferrer; + } + + public String getUserAgent() { + return userAgent; + } + + public String getUserIp() { + return userIp; + } + + public String getAction() { + return action; + } + + public String getCategory() { + return category; + } + + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClient.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClient.java new file mode 100644 index 000000000000..80f64aa5342b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClient.java @@ -0,0 +1,37 @@ +/** + * The 
contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; + +/** + * Client to send events to Google Analytics. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface GoogleAnalyticsClient { + + /** + * Check if the client supports the given analytics key. + * + * @param analyticsKey the analytics key + * @return true if the key is supported, false otherwise + */ + boolean isAnalyticsKeySupported(String analyticsKey); + + /** + * Send the given Google Analytics events. + * + * @param analyticsKey the analytics key + * @param events the events to be sent + */ + void sendEvents(String analyticsKey, List events); +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientException.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientException.java new file mode 100644 index 000000000000..a762deed34f9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientException.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +/** + * Exception thrown by {@link GoogleAnalyticsClient} during the events sending. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalyticsClientException extends RuntimeException { + + private static final long serialVersionUID = -2248100136404696572L; + + public GoogleAnalyticsClientException(String message, Throwable cause) { + super(message, cause); + } + + public GoogleAnalyticsClientException(String message) { + super(message); + } + + public GoogleAnalyticsClientException(Throwable cause) { + super(cause); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientImpl.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientImpl.java new file mode 100644 index 000000000000..b5ee1806cd56 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientImpl.java @@ -0,0 +1,119 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.dspace.google.GoogleAnalyticsEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Implementation of {@link GoogleAnalyticsClient}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalyticsClientImpl implements GoogleAnalyticsClient { + + private static final Logger LOGGER = LoggerFactory.getLogger(GoogleAnalyticsClientImpl.class); + + private final String keyPrefix; + + private final GoogleAnalyticsClientRequestBuilder requestBuilder; + + private final CloseableHttpClient httpclient; + + public GoogleAnalyticsClientImpl(String keyPrefix, GoogleAnalyticsClientRequestBuilder requestBuilder) { + this.keyPrefix = keyPrefix; + this.requestBuilder = requestBuilder; + this.httpclient = HttpClients.createDefault(); + } + + @Override + public boolean isAnalyticsKeySupported(String analyticsKey) { + return StringUtils.startsWith(analyticsKey, keyPrefix); + } + + @Override + public void sendEvents(String analyticsKey, List events) { + + if (!isAnalyticsKeySupported(analyticsKey)) { + throw new IllegalArgumentException("The given analytics key " + analyticsKey + + " is not supported. 
A key with prefix " + keyPrefix + " is required"); + } + + String endpointUrl = requestBuilder.getEndpointUrl(analyticsKey); + + requestBuilder.composeRequestsBody(analyticsKey, events) + .forEach(requestBody -> sendRequest(endpointUrl, requestBody)); + + } + + private void sendRequest(String endpointUrl, String requestBody) { + + try { + + HttpPost httpPost = new HttpPost(endpointUrl); + httpPost.setEntity(new StringEntity(requestBody)); + + try (CloseableHttpResponse response = httpclient.execute(httpPost)) { + if (isNotSuccessfull(response)) { + throw new GoogleAnalyticsClientException(formatErrorMessage(response)); + } + } + + } catch (GoogleAnalyticsClientException ex) { + throw ex; + } catch (Exception ex) { + throw new GoogleAnalyticsClientException("An error occurs sending events to " + endpointUrl, ex); + } + + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private int getStatusCode(HttpResponse response) { + return response.getStatusLine().getStatusCode(); + } + + private String formatErrorMessage(HttpResponse response) { + return "Status " + getStatusCode(response) + ". 
Content: " + getResponseContent(response); + } + + private String getResponseContent(HttpResponse response) { + try { + return IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset()); + } catch (UnsupportedOperationException | IOException e) { + LOGGER.error("An error occurs getting the response content", e); + return "Generic error"; + } + } + + public String getKeyPrefix() { + return keyPrefix; + } + + public GoogleAnalyticsClientRequestBuilder getRequestBuilder() { + return requestBuilder; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientRequestBuilder.java b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientRequestBuilder.java new file mode 100644 index 000000000000..f45eddce4c40 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/GoogleAnalyticsClientRequestBuilder.java @@ -0,0 +1,40 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; + +/** + * Interface for classes used by {@link GoogleAnalyticsClient} to define the url + * and the body of the requests to be sent to Google Analytics. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface GoogleAnalyticsClientRequestBuilder { + + /** + * Returns the url of the Google Analytics endpoint. + * + * @param analyticsKey the Google Analytics key + * @return the endpoint url + */ + String getEndpointUrl(String analyticsKey); + + /** + * Returns the body of the requests to be sent to Google Analytics as string, + * based on the given analytics key and events. 
+ * + * @param analyticsKey the Google Analytics key + * @param events the events to be sent + * @return the requests body as string + */ + List composeRequestsBody(String analyticsKey, List events); +} diff --git a/dspace-api/src/main/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilder.java b/dspace-api/src/main/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilder.java new file mode 100644 index 000000000000..274c27957e8a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilder.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static org.apache.commons.lang.StringUtils.startsWith; +import static org.apache.commons.lang3.StringUtils.isNotEmpty; + +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.google.GoogleAnalyticsEvent; + +/** + * Implementation of {@link GoogleAnalyticsClientRequestBuilder} that compose + * the request for Universal Analytics (UA). 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class UniversalAnalyticsClientRequestBuilder implements GoogleAnalyticsClientRequestBuilder { + + private final String endpointUrl; + + public UniversalAnalyticsClientRequestBuilder(String endpointUrl) { + this.endpointUrl = endpointUrl; + } + + @Override + public String getEndpointUrl(String analyticsKey) { + return endpointUrl; + } + + @Override + public List composeRequestsBody(String analyticsKey, List events) { + + if (!startsWith(analyticsKey, "UA-")) { + throw new IllegalArgumentException("Only keys with UA- prefix are supported"); + } + + String requestBody = events.stream() + .map(event -> formatEvent(analyticsKey, event)) + .collect(Collectors.joining("\n")); + + return isNotEmpty(requestBody) ? List.of(requestBody) : List.of(); + } + + private String formatEvent(String analyticsKey, GoogleAnalyticsEvent event) { + return "v=1" + + "&tid=" + analyticsKey + + "&cid=" + event.getClientId() + + "&t=event" + + "&uip=" + encodeParameter(event.getUserIp()) + + "&ua=" + encodeParameter(event.getUserAgent()) + + "&dr=" + encodeParameter(event.getDocumentReferrer()) + + "&dp=" + encodeParameter(event.getDocumentPath()) + + "&dt=" + encodeParameter(event.getDocumentTitle()) + + "&qt=" + (System.currentTimeMillis() - event.getTime()) + + "&ec=bitstream" + + "&ea=download" + + "&el=item"; + } + + private String encodeParameter(String parameter) { + return URLEncoder.encode(parameter, StandardCharsets.UTF_8); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/handle/Handle.java b/dspace-api/src/main/java/org/dspace/handle/Handle.java index 76fed105b9be..c35511353a3a 100644 --- a/dspace-api/src/main/java/org/dspace/handle/Handle.java +++ b/dspace-api/src/main/java/org/dspace/handle/Handle.java @@ -105,7 +105,7 @@ public boolean equals(final Object o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (!(o instanceof Handle)) { return false; } diff --git 
a/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java b/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java index 645b1fdbc443..aa730fe2b115 100644 --- a/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/handle/HandleServiceImpl.java @@ -9,12 +9,14 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.content.DSpaceObject; import org.dspace.content.service.SiteService; @@ -25,10 +27,9 @@ import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; - /** - * Interface to the CNRI Handle - * System . + * Interface to the CNRI Handle + * System. * *

    * Currently, this class simply maps handles to local facilities; handles which @@ -37,13 +38,12 @@ *

    * * @author Peter Breton - * @version $Revision$ */ public class HandleServiceImpl implements HandleService { /** - * log4j category + * log category */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(HandleServiceImpl.class); + private static final Logger log = LogManager.getLogger(); /** * Prefix registered to no one @@ -84,9 +84,7 @@ public String resolveToURL(Context context, String handle) String url = configurationService.getProperty("dspace.ui.url") + "/handle/" + handle; - if (log.isDebugEnabled()) { - log.debug("Resolved " + handle + " to " + url); - } + log.debug("Resolved {} to {}", handle, url); return url; } @@ -96,7 +94,7 @@ public String resolveUrlToHandle(Context context, String url) throws SQLException { String dspaceUrl = configurationService.getProperty("dspace.ui.url") + "/handle/"; - String handleResolver = configurationService.getProperty("handle.canonical.prefix"); + String handleResolver = getCanonicalPrefix(); String handle = null; @@ -126,10 +124,8 @@ public String getCanonicalPrefix() { // Let the admin define a new prefix, if not then we'll use the // CNRI default. This allows the admin to use "hdl:" if they want to or // use a locally branded prefix handle.myuni.edu. 
- String handlePrefix = configurationService.getProperty("handle.canonical.prefix"); - if (StringUtils.isBlank(handlePrefix)) { - handlePrefix = "http://hdl.handle.net/"; - } + String handlePrefix = configurationService.getProperty("handle.canonical.prefix", + "https://hdl.handle.net/"); return handlePrefix; } @@ -151,10 +147,10 @@ public String createHandle(Context context, DSpaceObject dso) handle.setResourceTypeId(dso.getType()); handleDAO.save(context, handle); - if (log.isDebugEnabled()) { - log.debug("Created new handle for " - + Constants.typeText[dso.getType()] + " (ID=" + dso.getID() + ") " + handleId); - } + log.debug("Created new handle for {} (ID={}) {}", + () -> Constants.typeText[dso.getType()], + () -> dso.getID(), + () -> handleId); return handleId; } @@ -205,10 +201,10 @@ public String createHandle(Context context, DSpaceObject dso, dso.addHandle(handle); handleDAO.save(context, handle); - if (log.isDebugEnabled()) { - log.debug("Created new handle for " - + Constants.typeText[dso.getType()] + " (ID=" + dso.getID() + ") " + suppliedHandle); - } + log.debug("Created new handle for {} (ID={}) {}", + () -> Constants.typeText[dso.getType()], + () -> dso.getID(), + () -> suppliedHandle); return suppliedHandle; } @@ -216,29 +212,29 @@ public String createHandle(Context context, DSpaceObject dso, @Override public void unbindHandle(Context context, DSpaceObject dso) throws SQLException { - List handles = getInternalHandles(context, dso); - if (CollectionUtils.isNotEmpty(handles)) { - for (Handle handle : handles) { + Iterator handles = dso.getHandles().iterator(); + if (handles.hasNext()) { + while (handles.hasNext()) { + final Handle handle = handles.next(); + handles.remove(); //Only set the "resouce_id" column to null when unbinding a handle. // We want to keep around the "resource_type_id" value, so that we // can verify during a restore whether the same *type* of resource // is reusing this handle! 
handle.setDSpaceObject(null); - //Also remove the handle from the DSO list to keep a consistent model - dso.getHandles().remove(handle); handleDAO.save(context, handle); - if (log.isDebugEnabled()) { - log.debug("Unbound Handle " + handle.getHandle() + " from object " + Constants.typeText[dso - .getType()] + " id=" + dso.getID()); - } + log.debug("Unbound Handle {} from object {} id={}", + () -> handle.getHandle(), + () -> Constants.typeText[dso.getType()], + () -> dso.getID()); } } else { log.trace( - "Cannot find Handle entry to unbind for object " + Constants.typeText[dso.getType()] + " id=" + dso - .getID() + ". Handle could have been unbinded before."); + "Cannot find Handle entry to unbind for object {} id={}. Handle could have been unbound before.", + Constants.typeText[dso.getType()], dso.getID()); } } @@ -261,7 +257,7 @@ public DSpaceObject resolveToObject(Context context, String handle) @Override public String findHandle(Context context, DSpaceObject dso) throws SQLException { - List handles = getInternalHandles(context, dso); + List handles = dso.getHandles(); if (CollectionUtils.isEmpty(handles)) { return null; } else { @@ -284,7 +280,7 @@ public String findHandle(Context context, DSpaceObject dso) public List getHandlesForPrefix(Context context, String prefix) throws SQLException { List handles = handleDAO.findByPrefix(context, prefix); - List handleStrings = new ArrayList(handles.size()); + List handleStrings = new ArrayList<>(handles.size()); for (Handle handle : handles) { handleStrings.add(handle.getHandle()); } @@ -296,7 +292,7 @@ public String getPrefix() { String prefix = configurationService.getProperty("handle.prefix"); if (StringUtils.isBlank(prefix)) { prefix = EXAMPLE_PREFIX; // XXX no good way to exit cleanly - log.error("handle.prefix is not configured; using " + prefix); + log.error("handle.prefix is not configured; using {}", prefix); } return prefix; } @@ -333,20 +329,6 @@ public void modifyHandleDSpaceObject(Context context, String 
handle, DSpaceObjec //////////////////////////////////////// // Internal methods //////////////////////////////////////// - - /** - * Return the handle for an Object, or null if the Object has no handle. - * - * @param context DSpace context - * @param dso DSpaceObject for which we require our handles - * @return The handle for object, or null if the object has no handle. - * @throws SQLException If a database error occurs - */ - protected List getInternalHandles(Context context, DSpaceObject dso) - throws SQLException { - return handleDAO.getHandlesByDSpaceObject(context, dso); - } - /** * Find the database row corresponding to handle. * @@ -410,7 +392,7 @@ public String parseHandle(String identifier) { } // Check additional prefixes supported in the config file - String[] additionalPrefixes = configurationService.getArrayProperty("handle.additional.prefixes"); + String[] additionalPrefixes = getAdditionalPrefixes(); for (String additionalPrefix : additionalPrefixes) { if (identifier.startsWith(additionalPrefix + "/")) { // prefix is the equivalent of 123456789 in 123456789/???; don't strip @@ -420,4 +402,9 @@ public String parseHandle(String identifier) { return null; } + + @Override + public String[] getAdditionalPrefixes() { + return configurationService.getArrayProperty("handle.additional.prefixes"); + } } diff --git a/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java b/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java index 133d3dbc2cd3..7fb03376eb5f 100644 --- a/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java +++ b/dspace-api/src/main/java/org/dspace/handle/UpdateHandlePrefix.java @@ -126,7 +126,7 @@ public static void main(String[] args) throws Exception { ); } catch (SQLException sqle) { - if ((context != null) && (context.isValid())) { + if (context.isValid()) { context.abort(); context = null; } diff --git a/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java 
b/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java index 3bd702bf809c..71bb798ae387 100644 --- a/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/handle/dao/impl/HandleDAOImpl.java @@ -90,13 +90,11 @@ public List findByPrefix(Context context, String prefix) throws SQLExcep @Override public long countHandlesByPrefix(Context context, String prefix) throws SQLException { - - CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); - CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Handle.class); Root handleRoot = criteriaQuery.from(Handle.class); - criteriaQuery.select(criteriaBuilder.count(criteriaQuery.from(Handle.class))); + criteriaQuery.select(handleRoot); criteriaQuery.where(criteriaBuilder.like(handleRoot.get(Handle_.handle), prefix + "%")); return countLong(context, criteriaQuery, criteriaBuilder, handleRoot); } diff --git a/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverDTO.java b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverDTO.java new file mode 100644 index 000000000000..fe50bba813d6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverDTO.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.handle.hdlresolver; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.util.Objects; + +import org.apache.commons.lang3.Validate; +import org.dspace.core.Constants; + +/** + * Maps the URL of the request to an handle identifier + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.it) + * + */ +public class HdlResolverDTO { + + private final String[] 
splittedString; + private final String handle; + + /** + * Decode a given URL + * @param url URL + * @return decoded URL + */ + private static String decode(String url) { + try { + return URLDecoder.decode(url, Constants.DEFAULT_ENCODING); + } catch (UnsupportedEncodingException e) { + return url; + } + } + + /** + * Default Constructor + * + * @param requestURL is the complete Request URL + * @param resolverSubPath is the rest service Sub-path + */ + public HdlResolverDTO(final String requestURL, final String resolverSubPath) { + Validate.notBlank(requestURL, "RequestURI not specified"); + Validate.notBlank(resolverSubPath, "fullPath not specified"); + this.splittedString = requestURL.split(resolverSubPath); + if (Objects.nonNull(splittedString) && splittedString.length > 1) { + // Decodes the URL-encoded characters of the String + this.handle = decode(splittedString[1]); + } else { + this.handle = null; + } + } + + /** + * Returns the splitted String of the resource-path + * + * @return + */ + public final String[] getSplittedString() { + return this.splittedString; + } + + /** + * Returns the handle identifier + * + * @return + */ + public final String getHandle() { + return this.handle; + } + + /** + * Checks if the handle identifier is valid. 
+ * + * @return + */ + public boolean isValid() { + return Objects.nonNull(this.handle) && + !"null".equalsIgnoreCase(this.handle) && + !this.handle.trim().isEmpty(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverService.java b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverService.java new file mode 100644 index 000000000000..3beca5f5dd70 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverService.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.handle.hdlresolver; + +import java.util.List; + +import org.dspace.core.Context; + +/** + * Service used to for utilities involving {@code HdlResolverDTO} and its + * resolution to handle URI and vice-versa. + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.it) + * + */ +/** + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public interface HdlResolverService { + + /** + * Method that creates an HdlResovlerDTO using the requestURI (full + * requested handle URI) and the path (REST handler URI) + * + * @param requestURI + * @param path + * @return HdlResolverDTO + */ + HdlResolverDTO resolveBy(String requestURI, String path); + + /** + * Converts the hdlResovler into URL fetching it from repository using the DSpace context + * + * @param context + * @param hdlResolver + * @return URL found or null + */ + String resolveToURL(Context context, HdlResolverDTO hdlResolver); + + /** + * List all available prefixes for this installation + * + * @return `List` of Handle prefixes + */ + List listPrefixes(); + + /** + * List all available handles with `prefix` + * + * @param context DSpace context + * @param prefix prefix to search + * @return `List` of handles + */ 
+ List listHandles(Context context, String prefix); + + /** + * Verifies status of handle controller + * + * @return `true` if enabled, `false` otherwise + */ + boolean isListhandlesEnabled(); + +} diff --git a/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverServiceImpl.java b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverServiceImpl.java new file mode 100644 index 000000000000..3607777322fc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/handle/hdlresolver/HdlResolverServiceImpl.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.handle.hdlresolver; + +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * + * Handle Resolver that uses an HandleService to retrieve the right + * URL of a target Handle. 
+ * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.it) + * + */ +@Service +public class HdlResolverServiceImpl implements HdlResolverService { + + public static final String LISTHANDLES_HIDE_PROP = "handle.hide.listhandles"; + + private static final Logger log = LogManager.getLogger(); + + @Autowired(required = true) + private HandleService handleService; + + @Autowired(required = true) + private ConfigurationService configurationService; + + @Override + public HdlResolverDTO resolveBy(String requestURI, String path) { + return new HdlResolverDTO(requestURI, path); + } + + @Override + public String resolveToURL(Context context, HdlResolverDTO hdlResolver) { + try { + return this.handleService.resolveToURL(context, hdlResolver.getHandle()); + } catch (SQLException e) { + log.error("Error while resolving Handle: " + hdlResolver.getHandle(), e); + throw new RuntimeException("Error while resolving Handle: " + hdlResolver.getHandle(), e); + } + } + + @Override + public List listPrefixes() { + return Stream.concat( + Stream.of(this.handleService.getAdditionalPrefixes()), + Stream.of(this.handleService.getPrefix()) + ) + .filter(StringUtils::isNotBlank) + .collect(Collectors.toList()); + } + + @Override + public List listHandles(Context context, String prefix) { + List handlesForPrefix = List.of(); + try { + handlesForPrefix = this.handleService.getHandlesForPrefix(context, prefix); + } catch (SQLException e) { + log.error("Error while listing handles for prefix: " + prefix, e); + throw new RuntimeException("Error while listing handles for prefix: " + prefix, e); + } + return handlesForPrefix; + } + + @Override + public boolean isListhandlesEnabled() { + return !this.configurationService.getBooleanProperty(LISTHANDLES_HIDE_PROP); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java b/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java index 62dec25587e9..85950ab6db87 100644 --- 
a/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java +++ b/dspace-api/src/main/java/org/dspace/handle/service/HandleService.java @@ -14,8 +14,8 @@ import org.dspace.core.Context; /** - * Interface to the CNRI Handle - * System . + * Interface to the CNRI Handle + * System. * *

    * Currently, this class simply maps handles to local facilities; handles which @@ -24,7 +24,6 @@ *

    * * @author Peter Breton - * @version $Revision$ */ public interface HandleService { @@ -42,7 +41,6 @@ public interface HandleService { public String resolveToURL(Context context, String handle) throws SQLException; - /** * Try to detect a handle in a URL. * @@ -56,18 +54,18 @@ public String resolveUrlToHandle(Context context, String url) throws SQLException; /** - * Provides handle canonical prefix using http://hdl.handle.net if not + * Provides handle canonical prefix using https://hdl.handle.net if not * overridden by the configuration property handle.canonical.prefix. * * No attempt is made to verify that handle is in fact valid. * - * @param handle The handle + * * @return The canonical form */ public String getCanonicalPrefix(); /** - * Transforms handle into a URI using http://hdl.handle.net if not + * Transforms handle into a URI using https://hdl.handle.net if not * overridden by the configuration property handle.canonical.prefix. * * No attempt is made to verify that handle is in fact valid. @@ -103,16 +101,18 @@ public String createHandle(Context context, DSpaceObject dso, String suppliedHan throws SQLException, IllegalStateException; /** - * Creates a handle entry, but with a handle supplied by the caller (new - * Handle not generated) + * Creates a handle entry, but with a handle supplied by the caller (new Handle + * not generated) * * @param context DSpace context * @param dso DSpaceObject * @param suppliedHandle existing handle value * @param force FIXME: currently unused * @return the Handle - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws IllegalStateException if specified handle is already in use by another object + * @throws SQLException An exception that provides information on a + * database access error or other errors. 
+ * @throws IllegalStateException if specified handle is already in use by + * another object */ public String createHandle(Context context, DSpaceObject dso, String suppliedHandle, boolean force) throws SQLException, IllegalStateException; @@ -192,4 +192,12 @@ public List getHandlesForPrefix(Context context, String prefix) * @return */ String parseHandle(String identifier); + + /** + * Gets the additional prefixes used for handles, + * mapped in configuration file. + * + * @return `String[]` array of prefixes + */ + String[] getAdditionalPrefixes(); } diff --git a/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java index 88cec74a5816..0ad83a329234 100644 --- a/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/harvest/HarvestedCollectionServiceImpl.java @@ -19,20 +19,20 @@ import java.util.Date; import java.util.List; import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPathExpressionException; -import ORG.oclc.oai.harvester2.verb.Identify; -import ORG.oclc.oai.harvester2.verb.ListIdentifiers; import org.dspace.content.Collection; import org.dspace.core.Context; import org.dspace.harvest.dao.HarvestedCollectionDAO; import org.dspace.harvest.service.HarvestedCollectionService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.input.DOMBuilder; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.input.DOMBuilder; +import org.oclc.oai.harvester2.verb.Identify; +import org.oclc.oai.harvester2.verb.ListIdentifiers; import org.springframework.beans.factory.annotation.Autowired; import org.w3c.dom.DOMException; 
import org.xml.sax.SAXException; @@ -198,7 +198,7 @@ public List verifyOAIharvester(String oaiSource, // First, see if we can contact the target server at all. try { new Identify(oaiSource); - } catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) { + } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached."); return errorSet; } @@ -216,7 +216,7 @@ public List verifyOAIharvester(String oaiSource, try { OREOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, OAIHarvester.getORENamespace().getURI()); DMDOAIPrefix = OAIHarvester.oaiResolveNamespaceToPrefix(oaiSource, DMD_NS.getURI()); - } catch (IOException | ParserConfigurationException | TransformerException | SAXException ex) { + } catch (IOException | ParserConfigurationException | XPathExpressionException | SAXException ex) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI did not respond to ListMetadataFormats query (" + ORE_NS.getPrefix() + ":" + OREOAIPrefix + " ; " @@ -260,7 +260,8 @@ public List verifyOAIharvester(String oaiSource, } } } - } catch (IOException | ParserConfigurationException | TransformerException | DOMException | SAXException e) { + } catch (IOException | ParserConfigurationException | XPathExpressionException | DOMException | + SAXException e) { errorSet.add(OAI_ADDRESS_ERROR + ": OAI server could not be reached"); return errorSet; } catch (RuntimeException re) { diff --git a/dspace-api/src/main/java/org/dspace/harvest/HarvestedItem.java b/dspace-api/src/main/java/org/dspace/harvest/HarvestedItem.java index 87d2a587496f..343347136bc3 100644 --- a/dspace-api/src/main/java/org/dspace/harvest/HarvestedItem.java +++ b/dspace-api/src/main/java/org/dspace/harvest/HarvestedItem.java @@ -56,6 +56,7 @@ public class HarvestedItem implements ReloadableEntity { protected HarvestedItem() { } + @Override public Integer getID() { return id; } @@ -89,7 
+90,6 @@ public String getOaiID() { */ public void setOaiID(String itemOaiID) { this.oaiId = itemOaiID; - return; } diff --git a/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java b/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java index 61385095e845..5aeb40bdd912 100644 --- a/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java +++ b/dspace-api/src/main/java/org/dspace/harvest/OAIHarvester.java @@ -28,13 +28,10 @@ import java.util.Set; import java.util.TimeZone; import javax.xml.parsers.ParserConfigurationException; -import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPathExpressionException; -import ORG.oclc.oai.harvester2.verb.GetRecord; -import ORG.oclc.oai.harvester2.verb.Identify; -import ORG.oclc.oai.harvester2.verb.ListMetadataFormats; -import ORG.oclc.oai.harvester2.verb.ListRecords; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; @@ -70,11 +67,15 @@ import org.dspace.harvest.service.HarvestedItemService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.Namespace; -import org.jdom.input.DOMBuilder; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.input.DOMBuilder; +import org.jdom2.output.XMLOutputter; +import org.oclc.oai.harvester2.verb.GetRecord; +import org.oclc.oai.harvester2.verb.Identify; +import org.oclc.oai.harvester2.verb.ListMetadataFormats; +import org.oclc.oai.harvester2.verb.ListRecords; import org.xml.sax.SAXException; @@ -91,7 +92,7 @@ public class OAIHarvester { /** * log4j category */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(OAIHarvester.class); + private static final 
Logger log = LogManager.getLogger(); private static final Namespace ATOM_NS = Namespace.getNamespace("http://www.w3.org/2005/Atom"); private static final Namespace ORE_NS = Namespace.getNamespace("http://www.openarchives.org/ore/terms/"); @@ -133,7 +134,7 @@ public class OAIHarvester { private String metadataKey; // DOMbuilder class for the DOM -> JDOM conversions - private static DOMBuilder db = new DOMBuilder(); + private static final DOMBuilder db = new DOMBuilder(); // The point at which this thread should terminate itself /* Initialize the harvester with a collection object */ @@ -331,18 +332,16 @@ public void runHarvest() throws SQLException, IOException, AuthorizeException { // main loop to keep requesting more objects until we're done List records; - Set errorSet = new HashSet(); + Set errorSet = new HashSet<>(); ListRecords listRecords = new ListRecords(oaiSource, fromDate, toDate, oaiSetId, descMDPrefix); log.debug( "Harvesting request parameters: listRecords " + oaiSource + " " + fromDate + " " + toDate + " " + oaiSetId + " " + descMDPrefix); - if (listRecords != null) { - log.info("HTTP Request: " + listRecords.getRequestURL()); - } + log.info("HTTP Request: " + listRecords.getRequestURL()); while (listRecords != null) { - records = new ArrayList(); + records = new ArrayList<>(); oaiResponse = db.build(listRecords.getDocument()); if (listRecords.getErrors() != null && listRecords.getErrors().getLength() > 0) { @@ -376,8 +375,8 @@ public void runHarvest() throws SQLException, IOException, AuthorizeException { } // Process the obtained records - if (records != null && records.size() > 0) { - log.info("Found " + records.size() + " records to process"); + if (!records.isEmpty()) { + log.info("Found {} records to process", records::size); for (Element record : records) { // check for STOP interrupt from the scheduler if (HarvestScheduler.getInterrupt() == HarvestScheduler.HARVESTER_INTERRUPT_STOP) { @@ -439,7 +438,8 @@ public void runHarvest() throws 
SQLException, IOException, AuthorizeException { harvestRow.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR); harvestedCollectionService.update(ourContext, harvestRow); alertAdmin(HarvestedCollection.STATUS_UNKNOWN_ERROR, ex); - log.error("Error occurred while generating an OAI response: " + ex.getMessage() + " " + ex.getCause(), ex); + log.error("Error occurred while generating an OAI response: {} {}", + ex.getMessage(), ex.getCause(), ex); ourContext.complete(); return; } finally { @@ -455,6 +455,7 @@ public void runHarvest() throws SQLException, IOException, AuthorizeException { harvestRow.setHarvestStartTime(startTime); harvestRow.setHarvestMessage("Harvest from " + oaiSource + " successful"); harvestRow.setHarvestStatus(HarvestedCollection.STATUS_READY); + harvestRow.setLastHarvested(startTime); log.info( "Harvest from " + oaiSource + " successful. The process took " + timeTaken + " milliseconds. Harvested " + currentRecord + " items."); @@ -493,11 +494,11 @@ private void reloadRequiredEntities() throws SQLException { * @throws HarvestingException if harvesting error * @throws ParserConfigurationException XML parsing error * @throws SAXException if XML processing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error */ protected void processRecord(Element record, String OREPrefix, final long currentRecord, long totalListSize) throws SQLException, AuthorizeException, IOException, CrosswalkException, HarvestingException, - ParserConfigurationException, SAXException, TransformerException { + ParserConfigurationException, SAXException, XPathExpressionException { WorkspaceItem wi = null; Date timeStart = new Date(); @@ -567,11 +568,7 @@ protected void processRecord(Element record, String OREPrefix, final long curren // Import the actual bitstreams if (harvestRow.getHarvestType() == 3) { log.info("Running ORE ingest on: " + item.getHandle()); - - List allBundles = item.getBundles(); - for (Bundle 
bundle : allBundles) { - itemService.removeBundle(ourContext, item, bundle); - } + itemService.removeAllBundles(ourContext, item); ORExwalk.ingest(ourContext, item, oreREM, true); } } else { @@ -623,7 +620,7 @@ protected void processRecord(Element record, String OREPrefix, final long curren List OREBundles = itemService.getBundles(item, "ORE"); Bitstream OREBitstream = null; - if (OREBundles.size() > 0) { + if (!OREBundles.isEmpty()) { OREBundle = OREBundles.get(0); } else { OREBundle = bundleService.create(ourContext, item, "ORE"); @@ -698,10 +695,10 @@ protected String extractHandle(Item item) { List values = itemService.getMetadata(item, "dc", "identifier", Item.ANY, Item.ANY); - if (values.size() > 0 && acceptedHandleServers != null) { + if (!values.isEmpty() && acceptedHandleServers != null) { for (MetadataValue value : values) { // 0 1 2 3 4 - // http://hdl.handle.net/1234/12 + // https://hdl.handle.net/1234/12 String[] urlPieces = value.getValue().split("/"); if (urlPieces.length != 5) { continue; @@ -732,7 +729,7 @@ protected String extractHandle(Item item) { * @return a string in the format 'yyyy-mm-ddThh:mm:ssZ' and converted to UTC timezone */ private String processDate(Date date) { - Integer timePad = configurationService.getIntProperty("oai.harvester.timePadding"); + int timePad = configurationService.getIntProperty("oai.harvester.timePadding"); if (timePad == 0) { timePad = 120; @@ -769,10 +766,10 @@ private String processDate(Date date, int secondsPad) { * @throws IOException if IO error * @throws SAXException if XML processing error * @throws ParserConfigurationException XML parsing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error */ private String oaiGetDateGranularity(String oaiSource) - throws IOException, ParserConfigurationException, SAXException, TransformerException { + throws IOException, ParserConfigurationException, SAXException, XPathExpressionException { Identify iden = 
new Identify(oaiSource); return iden.getDocument().getElementsByTagNameNS(OAI_NS.getURI(), "granularity").item(0).getTextContent(); } @@ -789,26 +786,24 @@ private String oaiGetDateGranularity(String oaiSource) * operations. * @throws ParserConfigurationException XML parsing error * @throws SAXException if XML processing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error * @throws ConnectException if could not connect to OAI server */ public static String oaiResolveNamespaceToPrefix(String oaiSource, String MDNamespace) - throws IOException, ParserConfigurationException, SAXException, TransformerException, ConnectException { + throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, ConnectException { String metaPrefix = null; // Query the OAI server for the metadata ListMetadataFormats lmf = new ListMetadataFormats(oaiSource); - if (lmf != null) { - Document lmfResponse = db.build(lmf.getDocument()); - List mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS) - .getChildren("metadataFormat", OAI_NS); + Document lmfResponse = db.build(lmf.getDocument()); + List mdFormats = lmfResponse.getRootElement().getChild("ListMetadataFormats", OAI_NS) + .getChildren("metadataFormat", OAI_NS); - for (Element mdFormat : mdFormats) { - if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) { - metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS); - break; - } + for (Element mdFormat : mdFormats) { + if (MDNamespace.equals(mdFormat.getChildText("metadataNamespace", OAI_NS))) { + metaPrefix = mdFormat.getChildText("metadataPrefix", OAI_NS); + break; } } @@ -868,15 +863,15 @@ protected void alertAdmin(int status, Exception ex) { * operations. 
* @throws ParserConfigurationException XML parsing error * @throws SAXException if XML processing error - * @throws TransformerException if XML transformer error + * @throws XPathExpressionException if XPath error * @throws HarvestingException if harvesting error */ protected List getMDrecord(String oaiSource, String itemOaiId, String metadataPrefix) - throws IOException, ParserConfigurationException, SAXException, TransformerException, HarvestingException { + throws IOException, ParserConfigurationException, SAXException, XPathExpressionException, HarvestingException { GetRecord getRecord = new GetRecord(oaiSource, itemOaiId, metadataPrefix); - Set errorSet = new HashSet(); + Set errorSet = new HashSet<>(); // If the metadata is not available for this item, can the whole thing - if (getRecord != null && getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) { + if (getRecord.getErrors() != null && getRecord.getErrors().getLength() > 0) { for (int i = 0; i < getRecord.getErrors().getLength(); i++) { String errorCode = getRecord.getErrors().item(i).getAttributes().getNamedItem("code").getTextContent(); errorSet.add(errorCode); diff --git a/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java b/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java index 5577f41e6663..e7b456f7b320 100644 --- a/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java +++ b/dspace-api/src/main/java/org/dspace/health/EmbargoCheck.java @@ -26,9 +26,8 @@ public class EmbargoCheck extends Check { @Override public String run(ReportInfo ri) { String ret = ""; - Context context = null; + Context context = new Context(); try { - context = new Context(); Iterator item_iter = null; try { item_iter = embargoService.findItemsByLiftMetadata(context); @@ -56,9 +55,7 @@ public String run(ReportInfo ri) { } catch (SQLException e) { error(e); try { - if (null != context) { - context.abort(); - } + context.abort(); } catch (Exception e1) { error(e); } diff --git 
a/dspace-api/src/main/java/org/dspace/identifier/DOI.java b/dspace-api/src/main/java/org/dspace/identifier/DOI.java index b73fb2b155ba..e99472e45c78 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOI.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOI.java @@ -34,8 +34,6 @@ public class DOI implements Identifier, ReloadableEntity { public static final String SCHEME = "doi:"; - public static final String RESOLVER = "http://dx.doi.org"; - @Id @Column(name = "doi_id") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "doi_seq") @@ -62,6 +60,7 @@ public class DOI protected DOI() { } + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java index c8e9636bcbbb..b70eda960d35 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java @@ -19,12 +19,17 @@ import org.dspace.content.Item; import org.dspace.content.MetadataValue; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.logic.Filter; +import org.dspace.content.logic.LogicalStatementException; +import org.dspace.content.logic.TrueFilter; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.identifier.doi.DOIConnector; import org.dspace.identifier.doi.DOIIdentifierException; +import org.dspace.identifier.doi.DOIIdentifierNotApplicableException; import org.dspace.identifier.service.DOIService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -41,9 +46,9 @@ *

    Any identifier a method of this class returns is a string in the following format: doi:10.123/456.

    * * @author Pascal-Nicolas Becker + * @author Kim Shepherd */ -public class DOIIdentifierProvider - extends IdentifierProvider { +public class DOIIdentifierProvider extends FilteredIdentifierProvider { private static final Logger log = LoggerFactory.getLogger(DOIIdentifierProvider.class); /** @@ -69,16 +74,44 @@ public class DOIIdentifierProvider public static final String MD_SCHEMA = "dc"; public static final String DOI_ELEMENT = "identifier"; public static final String DOI_QUALIFIER = "uri"; - + // The DOI is queued for registered with the service provider public static final Integer TO_BE_REGISTERED = 1; + // The DOI is queued for reservation with the service provider public static final Integer TO_BE_RESERVED = 2; + // The DOI has been registered online public static final Integer IS_REGISTERED = 3; + // The DOI has been reserved online public static final Integer IS_RESERVED = 4; + // The DOI is reserved and requires an updated metadata record to be sent to the service provider public static final Integer UPDATE_RESERVED = 5; + // The DOI is registered and requires an updated metadata record to be sent to the service provider public static final Integer UPDATE_REGISTERED = 6; + // The DOI metadata record should be updated before performing online registration public static final Integer UPDATE_BEFORE_REGISTRATION = 7; + // The DOI will be deleted locally and marked as deleted in the DOI service provider public static final Integer TO_BE_DELETED = 8; + // The DOI has been deleted and is no longer associated with an item public static final Integer DELETED = 9; + // The DOI is created in the database and is waiting for either successful filter check on item install or + // manual intervention by an administrator to proceed to reservation or registration + public static final Integer PENDING = 10; + // The DOI is created in the database, but no more context is known + public static final Integer MINTED = 11; + + public static final String[] statusText = { + 
"UNKNOWN", // 0 + "TO_BE_REGISTERED", // 1 + "TO_BE_RESERVED", // 2 + "IS_REGISTERED", // 3 + "IS_RESERVED", // 4 + "UPDATE_RESERVED", // 5 + "UPDATE_REGISTERED", // 6 + "UPDATE_BEFORE_REGISTRATION", // 7 + "TO_BE_DELETED", // 8 + "DELETED", // 9 + "PENDING", // 10 + "MINTED", // 11 + }; @Autowired(required = true) protected DOIService doiService; @@ -87,6 +120,9 @@ public class DOIIdentifierProvider @Autowired(required = true) protected ItemService itemService; + /** + * Empty / default constructor for Spring + */ protected DOIIdentifierProvider() { } @@ -103,6 +139,10 @@ protected DOIIdentifierProvider() { */ private String NAMESPACE_SEPARATOR; + /** + * Get DOI prefix from configuration + * @return a String containing the DOI prefix + */ protected String getPrefix() { if (null == this.PREFIX) { this.PREFIX = this.configurationService.getProperty(CFG_PREFIX); @@ -116,6 +156,10 @@ protected String getPrefix() { return this.PREFIX; } + /** + * Get namespace separator from configuration + * @return a String containing the namespace separator + */ protected String getNamespaceSeparator() { if (null == this.NAMESPACE_SEPARATOR) { this.NAMESPACE_SEPARATOR = this.configurationService.getProperty(CFG_NAMESPACE_SEPARATOR); @@ -126,6 +170,13 @@ protected String getNamespaceSeparator() { return this.NAMESPACE_SEPARATOR; } + /** + * Set the DOI connector, which is the component that commuincates with the remote registration service + * (eg. 
DataCite, EZID, Crossref) + * Spring will use this setter to set the DOI connector from the configured property in identifier-services.xml + * + * @param connector a DOIConnector + */ @Autowired(required = true) public void setDOIConnector(DOIConnector connector) { this.connector = connector; @@ -164,23 +215,65 @@ public boolean supports(String identifier) { return true; } - + /** + * Register a new identifier for a given DSpaceObject, never skipping or ignoring any configured filter + * @param context - DSpace context + * @param dso - DSpaceObject to use for identifier registration + * @return identifier + * @throws IdentifierException + */ @Override public String register(Context context, DSpaceObject dso) + throws IdentifierException { + return register(context, dso, this.filter); + } + + /** + * Register a specified DOI for a given DSpaceObject, never skipping or ignoring any configured filter + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new DOI + * @param identifier - String containing the identifier to register + * @throws IdentifierException + */ + @Override + public void register(Context context, DSpaceObject dso, String identifier) + throws IdentifierException { + register(context, dso, identifier, this.filter); + } + + /** + * Register a new DOI for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new DOI + * @param filter - Logical item filter to determine whether this identifier should be registered + * @throws IdentifierException + */ + @Override + public String register(Context context, DSpaceObject dso, Filter filter) throws IdentifierException { if (!(dso instanceof Item)) { // DOI are currently assigned only to Item return null; } - String doi = mint(context, dso); + + String doi = mint(context, dso, filter); + // register tries to reserve doi if it's not already. // So we don't have to reserve it here. 
- register(context, dso, doi); + register(context, dso, doi, filter); return doi; } + /** + * Register a specified DOI for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new DOI + * @param identifier - String containing the DOI to register + * @param filter - Logical item filter to determine whether this identifier should be registered + * @throws IdentifierException + */ @Override - public void register(Context context, DSpaceObject dso, String identifier) + public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { if (!(dso instanceof Item)) { // DOI are currently assigned only to Item @@ -191,7 +284,7 @@ public void register(Context context, DSpaceObject dso, String identifier) // search DOI in our db try { - doiRow = loadOrCreateDOI(context, dso, doi); + doiRow = loadOrCreateDOI(context, dso, doi, filter); } catch (SQLException ex) { log.error("Error in databse connection: " + ex.getMessage()); throw new RuntimeException("Error in database conncetion.", ex); @@ -200,10 +293,9 @@ public void register(Context context, DSpaceObject dso, String identifier) if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { throw new DOIIdentifierException("You tried to register a DOI that " - + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); + + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); } - // Check status of DOI if (IS_REGISTERED.equals(doiRow.getStatus())) { return; } @@ -216,6 +308,7 @@ public void register(Context context, DSpaceObject dso, String identifier) log.warn("SQLException while changing status of DOI {} to be registered.", doi); throw new RuntimeException(sqle); } + } /** @@ -234,14 +327,27 @@ public void register(Context context, DSpaceObject dso, String identifier) */ @Override public void reserve(Context context, DSpaceObject dso, String identifier) + throws 
IdentifierException, IllegalArgumentException { + reserve(context, dso, identifier, this.filter); + } + + /** + * Reserve a specified DOI for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to reserve + * @param filter - Logical item filter to determine whether this identifier should be reserved + * @throws IdentifierException + * @throws IllegalArgumentException + */ + @Override + public void reserve(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException, IllegalArgumentException { String doi = doiService.formatIdentifier(identifier); DOI doiRow = null; try { - // if the doi is in our db already loadOrCreateDOI just returns. - // if it is not loadOrCreateDOI safes the doi. - doiRow = loadOrCreateDOI(context, dso, doi); + doiRow = loadOrCreateDOI(context, dso, doi, filter); } catch (SQLException sqle) { throw new RuntimeException(sqle); } @@ -258,16 +364,40 @@ public void reserve(Context context, DSpaceObject dso, String identifier) } } + /** + * Perform the actual online / API interaction required to reserve the DOI online + * always applying filters if they are configured + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to reserve + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ public void reserveOnline(Context context, DSpaceObject dso, String identifier) throws IdentifierException, IllegalArgumentException, SQLException { + reserveOnline(context, dso, identifier, this.filter); + } + + /** + * Perform the actual online / API interaction required to reserve the DOI online + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to reserve + * @param filter - Logical item filter to determine 
whether this identifier should be reserved online + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ + public void reserveOnline(Context context, DSpaceObject dso, String identifier, Filter filter) + throws IdentifierException, IllegalArgumentException, SQLException { String doi = doiService.formatIdentifier(identifier); // get TableRow and ensure DOI belongs to dso regarding our db - DOI doiRow = loadOrCreateDOI(context, dso, doi); + DOI doiRow = loadOrCreateDOI(context, dso, doi, filter); - if (DELETED.equals(doiRow.getStatus()) || - TO_BE_DELETED.equals(doiRow.getStatus())) { + if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { throw new DOIIdentifierException("You tried to reserve a DOI that " - + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); + + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); } connector.reserveDOI(context, dso, doi); @@ -276,16 +406,43 @@ public void reserveOnline(Context context, DSpaceObject dso, String identifier) doiService.update(context, doiRow); } + /** + * Perform the actual online / API interaction required to register the DOI online + * always applying filters if they are configured + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to register + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ public void registerOnline(Context context, DSpaceObject dso, String identifier) throws IdentifierException, IllegalArgumentException, SQLException { + + registerOnline(context, dso, identifier, this.filter); + + } + + /** + * Perform the actual online / API interaction required to register the DOI online + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to register + * @param filter - Logical item filter 
to determine whether this identifier should be registered online + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ + public void registerOnline(Context context, DSpaceObject dso, String identifier, Filter filter) + throws IdentifierException, IllegalArgumentException, SQLException { + String doi = doiService.formatIdentifier(identifier); // get TableRow and ensure DOI belongs to dso regarding our db - DOI doiRow = loadOrCreateDOI(context, dso, doi); + DOI doiRow = loadOrCreateDOI(context, dso, doi, filter); - if (DELETED.equals(doiRow.getStatus()) || - TO_BE_DELETED.equals(doiRow.getStatus())) { + if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { throw new DOIIdentifierException("You tried to register a DOI that " - + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); + + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); } // register DOI Online @@ -294,7 +451,7 @@ public void registerOnline(Context context, DSpaceObject dso, String identifier) } catch (DOIIdentifierException die) { // do we have to reserve DOI before we can register it? 
if (die.getCode() == DOIIdentifierException.RESERVE_FIRST) { - this.reserveOnline(context, dso, identifier); + this.reserveOnline(context, dso, identifier, filter); connector.registerDOI(context, dso, doi); } else { throw die; @@ -314,15 +471,43 @@ public void registerOnline(Context context, DSpaceObject dso, String identifier) doiService.update(context, doiRow); } + /** + * Update metadata for a registered object + * If the DOI for hte item already exists, *always* skip the filter since it should only be used for + * allowing / disallowing reservation and registration, not metadata updates or deletions + * + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to reserve + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ public void updateMetadata(Context context, DSpaceObject dso, String identifier) - throws IdentifierException, IllegalArgumentException, SQLException { + throws IdentifierException, IllegalArgumentException, SQLException { + String doi = doiService.formatIdentifier(identifier); - DOI doiRow = loadOrCreateDOI(context, dso, doi); + // Use the default filter unless we find the object + Filter updateFilter = this.filter; + + if (doiService.findDOIByDSpaceObject(context, dso) != null) { + // We can skip the filter here since we know the DOI already exists for the item + log.debug("updateMetadata: found DOIByDSpaceObject: " + + doiService.findDOIByDSpaceObject(context, dso).getDoi()); + updateFilter = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName( + "always_true_filter", TrueFilter.class); + } - if (DELETED.equals(doiRow.getStatus()) || - TO_BE_DELETED.equals(doiRow.getStatus())) { + DOI doiRow = loadOrCreateDOI(context, dso, doi, updateFilter); + + if (PENDING.equals(doiRow.getStatus()) || MINTED.equals(doiRow.getStatus())) { + log.info("Not updating metadata for PENDING or MINTED doi: " + doi); 
+ return; + } + + if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { throw new DOIIdentifierException("You tried to register a DOI that " - + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); + + "is marked as DELETED.", DOIIdentifierException.DOI_IS_DELETED); } if (IS_REGISTERED.equals(doiRow.getStatus())) { @@ -338,8 +523,20 @@ public void updateMetadata(Context context, DSpaceObject dso, String identifier) doiService.update(context, doiRow); } + /** + * Update metadata for a registered object in the DOI Connector to update the agency records + * If the DOI for hte item already exists, *always* skip the filter since it should only be used for + * allowing / disallowing reservation and registration, not metadata updates or deletions + * + * @param context - DSpace context + * @param dso - DSpaceObject identified by this DOI + * @param identifier - String containing the DOI to reserve + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ public void updateMetadataOnline(Context context, DSpaceObject dso, String identifier) - throws IdentifierException, SQLException { + throws IdentifierException, SQLException { String doi = doiService.formatIdentifier(identifier); // ensure DOI belongs to dso regarding our db @@ -348,31 +545,28 @@ public void updateMetadataOnline(Context context, DSpaceObject dso, String ident doiRow = doiService.findByDoi(context, doi.substring(DOI.SCHEME.length())); } catch (SQLException sqle) { log.warn("SQLException while searching a DOI in our db.", sqle); - throw new RuntimeException("Unable to retrieve information about " + - "a DOI out of database.", sqle); + throw new RuntimeException("Unable to retrieve information about a DOI out of database.", sqle); } if (null == doiRow) { - log.error("Cannot update metadata for DOI {}: unable to find it in " - + "our db.", doi); + log.error("Cannot update metadata for DOI {}: unable to find it in our db.", 
doi); throw new DOIIdentifierException("Unable to find DOI.", - DOIIdentifierException.DOI_DOES_NOT_EXIST); + DOIIdentifierException.DOI_DOES_NOT_EXIST); } if (!Objects.equals(doiRow.getDSpaceObject(), dso)) { log.error("Refuse to update metadata of DOI {} with the metadata of " - + " an object ({}/{}) the DOI is not dedicated to.", + + " an object ({}/{}) the DOI is not dedicated to.", doi, contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso), dso.getID().toString()); throw new DOIIdentifierException("Cannot update DOI metadata: " - + "DOI and DSpaceObject does not match!", - DOIIdentifierException.MISMATCH); + + "DOI and DSpaceObject does not match!", + DOIIdentifierException.MISMATCH); } - if (DELETED.equals(doiRow.getStatus()) || - TO_BE_DELETED.equals(doiRow.getStatus())) { + if (DELETED.equals(doiRow.getStatus()) || TO_BE_DELETED.equals(doiRow.getStatus())) { throw new DOIIdentifierException("You tried to update the metadata" - + "of a DOI that is marked as DELETED.", - DOIIdentifierException.DOI_IS_DELETED); + + " of a DOI that is marked as DELETED.", + DOIIdentifierException.DOI_IS_DELETED); } connector.updateMetadata(context, dso, doi); @@ -388,41 +582,69 @@ public void updateMetadataOnline(Context context, DSpaceObject dso, String ident doiService.update(context, doiRow); } + /** + * Mint a new DOI in DSpace - this is usually the first step of registration + * Always apply filters if they are configured + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new identifier + * @return a String containing the new identifier + * @throws IdentifierException + */ @Override public String mint(Context context, DSpaceObject dso) - throws IdentifierException { + throws IdentifierException { + return mint(context, dso, this.filter); + } + + /** + * Mint a new DOI in DSpace - this is usually the first step of registration + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new identifier + * 
@param filter - Logical item filter to determine whether this identifier should be registered + * @return a String containing the new identifier + * @throws IdentifierException + */ + @Override + public String mint(Context context, DSpaceObject dso, Filter filter) throws IdentifierException { + String doi = null; try { doi = getDOIByObject(context, dso); } catch (SQLException e) { log.error("Error while attemping to retrieve information about a DOI for " - + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso - .getID() + "."); + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + "."); throw new RuntimeException("Error while attempting to retrieve " + - "information about a DOI for " + contentServiceFactory - .getDSpaceObjectService(dso).getTypeText(dso) + - " with ID " + dso.getID() + ".", e); + "information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", e); } if (null == doi) { try { - DOI doiRow = loadOrCreateDOI(context, dso, null); + DOI doiRow = loadOrCreateDOI(context, dso, null, filter); doi = DOI.SCHEME + doiRow.getDoi(); } catch (SQLException e) { log.error("Error while creating new DOI for Object of " + - "ResourceType {} with id {}.", dso.getType(), dso.getID()); + "ResourceType {} with id {}.", dso.getType(), dso.getID()); throw new RuntimeException("Error while attempting to create a " + - "new DOI for " + contentServiceFactory.getDSpaceObjectService(dso) - .getTypeText(dso) + " with ID " + - dso.getID() + ".", e); + "new DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + + dso.getID() + ".", e); } } return doi; } + /** + * Resolve an identifier to a DSpaceObject, if it is registered + * @param context - DSpace context + * @param identifier - to be resolved. + * @param attributes - additional information for resolving {@code identifier}. 
+ * @return a DSpaceObject identified by the identifier string + * @throws IdentifierNotFoundException + * @throws IdentifierNotResolvableException + */ @Override public DSpaceObject resolve(Context context, String identifier, String... attributes) - throws IdentifierNotFoundException, IdentifierNotResolvableException { + throws IdentifierNotFoundException, IdentifierNotResolvableException { String doi = null; try { doi = doiService.formatIdentifier(identifier); @@ -437,16 +659,23 @@ public DSpaceObject resolve(Context context, String identifier, String... attrib return dso; } catch (SQLException sqle) { log.error("SQLException while searching a DOI in our db.", sqle); - throw new RuntimeException("Unable to retrieve information about " + - "a DOI out of database.", sqle); + throw new RuntimeException("Unable to retrieve information about a DOI out of database.", sqle); } catch (IdentifierException e) { throw new IdentifierNotResolvableException(e); } } + /** + * Look up a DOI identifier for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject to look up + * @return a String containing the DOI + * @throws IdentifierNotFoundException + * @throws IdentifierNotResolvableException + */ @Override public String lookup(Context context, DSpaceObject dso) - throws IdentifierNotFoundException, IdentifierNotResolvableException { + throws IdentifierNotFoundException, IdentifierNotResolvableException { String doi = null; try { doi = getDOIByObject(context, dso); @@ -455,18 +684,23 @@ public String lookup(Context context, DSpaceObject dso) } if (null == doi) { - throw new IdentifierNotFoundException("No DOI for DSpaceObject of type " - + contentServiceFactory.getDSpaceObjectService(dso) - .getTypeText(dso) + " with ID " + dso - .getID() + " found."); + throw new IdentifierNotFoundException("No DOI for DSpaceObject of type " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + " found."); } return 
doi; } + /** + * Delete all DOIs for a DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject to have all its DOIs deleted + * @throws IdentifierException + */ @Override public void delete(Context context, DSpaceObject dso) - throws IdentifierException { + throws IdentifierException { // delete all DOIs for this Item from our database. try { String doi = getDOIByObject(context, dso); @@ -475,13 +709,12 @@ public void delete(Context context, DSpaceObject dso) doi = getDOIByObject(context, dso); } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " - + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso - .getID() + ".", ex); + log.error("Error while attemping to retrieve information about a DOI for " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " + - "information about a DOI for " + contentServiceFactory - .getDSpaceObjectService(dso).getTypeText(dso) + - " with ID " + dso.getID() + ".", ex); + "information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", ex); } // delete all DOIs of this item out of its metadata @@ -493,28 +726,33 @@ public void delete(Context context, DSpaceObject dso) doi = getDOIOutOfObject(dso); } } catch (AuthorizeException ex) { - log.error("Error while removing a DOI out of the metadata of an " - + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso - .getID() + ".", ex); - throw new RuntimeException("Error while removing a DOI out of the " - + "metadata of an " + contentServiceFactory.getDSpaceObjectService(dso) - .getTypeText(dso) + " with ID " - + dso.getID() + ".", ex); + log.error("Error while removing a DOI out of the metadata of an " + + 
contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", ex); + throw new RuntimeException("Error while removing a DOI out of the metadata of an " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", ex); } catch (SQLException ex) { - log.error("Error while removing a DOI out of the metadata of an " - + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso - .getID() + ".", ex); - throw new RuntimeException("Error while removing a DOI out of the " - + "metadata of an " + contentServiceFactory.getDSpaceObjectService(dso) - .getTypeText(dso) + " with ID " - + dso.getID() + ".", ex); + log.error("Error while removing a DOI out of the metadata of an " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", ex); + throw new RuntimeException("Error while removing a DOI out of the " + + "metadata of an " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + + " with ID " + dso.getID() + ".", ex); } } + /** + * Delete a specific DOI for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject to be de-identified. 
+ * @param identifier - String containing identifier to delete + * @throws IdentifierException + */ @Override public void delete(Context context, DSpaceObject dso, String identifier) - throws IdentifierException { + throws IdentifierException { String doi = doiService.formatIdentifier(identifier); DOI doiRow = null; @@ -528,8 +766,8 @@ public void delete(Context context, DSpaceObject dso, String identifier) if (null != doiRow) { if (!Objects.equals(dso, doiRow.getDSpaceObject())) { throw new DOIIdentifierException("Trying to delete a DOI out of " - + "an object that is not addressed by the DOI.", - DOIIdentifierException.MISMATCH); + + "an object that is not addressed by the DOI.", + DOIIdentifierException.MISMATCH); } } @@ -539,12 +777,12 @@ public void delete(Context context, DSpaceObject dso, String identifier) } catch (AuthorizeException ex) { log.error("Not authorized to delete a DOI out of an Item.", ex); throw new DOIIdentifierException("Not authorized to delete DOI.", - ex, DOIIdentifierException.UNAUTHORIZED_METADATA_MANIPULATION); + ex, DOIIdentifierException.UNAUTHORIZED_METADATA_MANIPULATION); } catch (SQLException ex) { log.error("SQLException occurred while deleting a DOI out of an item: " - + ex.getMessage()); + + ex.getMessage()); throw new RuntimeException("Error while deleting a DOI out of the " + - "metadata of an Item " + dso.getID(), ex); + "metadata of an Item " + dso.getID(), ex); } // change doi status in db if necessary. @@ -567,8 +805,13 @@ public void delete(Context context, DSpaceObject dso, String identifier) // DOIS. But it is possible to mark a DOI as "inactive". 
} - public void deleteOnline(Context context, String identifier) - throws DOIIdentifierException { + /** + * Delete a specific DOI in the registration agency records via the DOI Connector + * @param context - DSpace context + * @param identifier - String containing identifier to delete + * @throws DOIIdentifierException + */ + public void deleteOnline(Context context, String identifier) throws DOIIdentifierException { String doi = doiService.formatIdentifier(identifier); DOI doiRow = null; @@ -579,16 +822,15 @@ public void deleteOnline(Context context, String identifier) } if (null == doiRow) { throw new DOIIdentifierException("This identifier: " + identifier - + " isn't in our database", - DOIIdentifierException.DOI_DOES_NOT_EXIST); + + " isn't in our database", + DOIIdentifierException.DOI_DOES_NOT_EXIST); } if (!TO_BE_DELETED.equals(doiRow.getStatus())) { - log.error("This identifier: {} couldn't be deleted. " - + "Delete it first from metadata.", - DOI.SCHEME + doiRow.getDoi()); + log.error("This identifier: {} couldn't be deleted. Delete it first from metadata.", + DOI.SCHEME + doiRow.getDoi()); throw new IllegalArgumentException("Couldn't delete this identifier:" - + DOI.SCHEME + doiRow.getDoi() - + ". Delete it first from metadata."); + + DOI.SCHEME + doiRow.getDoi() + + ". Delete it first from metadata."); } connector.deleteDOI(context, doi); @@ -603,17 +845,16 @@ public void deleteOnline(Context context, String identifier) /** * Returns a DSpaceObject depending on its DOI. - * - * @param context The relevant DSpace Context. + * @param context the context * @param identifier The DOI in a format that is accepted by * {@link org.dspace.identifier.service.DOIService#formatIdentifier(String)}. * @return Null if the DOI couldn't be found or the associated DSpaceObject. - * @throws SQLException if database error - * @throws DOIIdentifierException If {@code identifier} is null or an empty string. 
+ * @throws SQLException if database error + * @throws DOIIdentifierException If {@code identifier} is null or an empty string. * @throws IllegalArgumentException If the identifier couldn't be recognized as DOI. */ public DSpaceObject getObjectByDOI(Context context, String identifier) - throws SQLException, DOIIdentifierException, IllegalArgumentException { + throws SQLException, DOIIdentifierException, IllegalArgumentException { String doi = doiService.formatIdentifier(identifier); DOI doiRow = doiService.findByDoi(context, doi.substring(DOI.SCHEME.length())); @@ -622,10 +863,9 @@ public DSpaceObject getObjectByDOI(Context context, String identifier) } if (doiRow.getDSpaceObject() == null) { - log.error("Found DOI " + doi + - " in database, but no assigned Object could be found."); + log.error("Found DOI " + doi + " in database, but no assigned Object could be found."); throw new IllegalStateException("Found DOI " + doi + - " in database, but no assigned Object could be found."); + " in database, but no assigned Object could be found."); } return doiRow.getDSpaceObject(); @@ -640,8 +880,7 @@ public DSpaceObject getObjectByDOI(Context context, String identifier) * @return The DOI as String or null if DOI was not found. * @throws SQLException if database error */ - public String getDOIByObject(Context context, DSpaceObject dso) - throws SQLException { + public String getDOIByObject(Context context, DSpaceObject dso) throws SQLException { // String sql = "SELECT * FROM Doi WHERE resource_type_id = ? " + // "AND resource_id = ? AND ((status != ? AND status != ?) OR status IS NULL)"; @@ -651,23 +890,20 @@ public String getDOIByObject(Context context, DSpaceObject dso) } if (doiRow.getDoi() == null) { - log.error("A DOI with an empty doi column was found in the database. 
DSO-Type: " - + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + ", ID: " + dso - .getID() + "."); - throw new IllegalStateException("A DOI with an empty doi column " + - "was found in the database. DSO-Type: " + contentServiceFactory - .getDSpaceObjectService(dso).getTypeText(dso) + - ", ID: " + dso.getID() + "."); + log.error("A DOI with an empty doi column was found in the database. DSO-Type: " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + ", ID: " + dso.getID() + "."); + throw new IllegalStateException("A DOI with an empty doi column was found in the database. DSO-Type: " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + ", ID: " + dso.getID() + "."); } return DOI.SCHEME + doiRow.getDoi(); } /** - * Load a DOI from the database or creates it if it does not exist. This - * method can be used to ensure that a DOI exists in the database and to - * load the appropriate TableRow. As protected method we don't check if the - * DOI is in a decent format, use DOI.formatIdentifier(String) if necessary. + * Load a DOI from the database or creates it if it does not exist. + * This method can be used to ensure that a DOI exists in the database and + * to load the appropriate TableRow. As protected method we don't check if + * the DOI is in a decent format, use DOI.formatIdentifier(String) if necessary. * * @param context The relevant DSpace Context. * @param dso The DSpaceObject the DOI should be loaded or created for. @@ -677,10 +913,37 @@ public String getDOIByObject(Context context, DSpaceObject dso) * @throws SQLException In case of an error using the database. * @throws DOIIdentifierException If {@code doi} is not part of our prefix or * DOI is registered for another object already. + * @throws IdentifierNotApplicableException passed through. 
*/ protected DOI loadOrCreateDOI(Context context, DSpaceObject dso, String doiIdentifier) - throws SQLException, DOIIdentifierException { + throws SQLException, DOIIdentifierException, IdentifierNotApplicableException { + return loadOrCreateDOI(context, dso, doiIdentifier, this.filter); + } + + /** + * Load DOI from database, or create one if it doesn't yet exist. + * We need to distinguish several cases.LoadOrCreate can be called with a + * specified identifier to load or create. It can also be used to create a + * new unspecified identifier. In the latter case doiIdentifier is set null. + * If doiIdentifier is set, we know which doi we should try to load or + * create, but even in such a situation we might be able to find it in the + * database or might have to create it. + * + * @param context - DSpace context + * @param dso - DSpaceObject to identify + * @param doiIdentifier - DOI to load or create (null to mint a new one) + * @param filter - Logical item filter to determine whether this identifier should be registered + * @return + * @throws SQLException + * @throws DOIIdentifierException + * @throws org.dspace.identifier.IdentifierNotApplicableException passed through. + */ + protected DOI loadOrCreateDOI(Context context, DSpaceObject dso, String doiIdentifier, Filter filter) + throws SQLException, DOIIdentifierException, IdentifierNotApplicableException { + DOI doi = null; + + // Was an identifier specified that we shall try to load or create if it is not existing yet? if (null != doiIdentifier) { // we expect DOIs to have the DOI-Scheme except inside the doi table: doiIdentifier = doiIdentifier.substring(DOI.SCHEME.length()); @@ -692,47 +955,55 @@ protected DOI loadOrCreateDOI(Context context, DSpaceObject dso, String doiIdent // doi was deleted, check resource type if (doi.getResourceTypeId() != null && doi.getResourceTypeId() != dso.getType()) { - // doi was assigend to another resource type. Don't + // doi was assigned to another resource type. 
Don't // reactivate it - throw new DOIIdentifierException("Cannot reassing " - + "previously deleted DOI " + doiIdentifier - + " as the resource types of the object it was " - + "previously assigned to and the object it " - + "shall be assigned to now divert (was: " - + Constants.typeText[doi.getResourceTypeId()] - + ", trying to assign to " - + Constants.typeText[dso.getType()] + ").", - DOIIdentifierException.DOI_IS_DELETED); + throw new DOIIdentifierException("Cannot reassign" + + " previously deleted DOI " + doiIdentifier + + " as the resource types of the object it was" + + " previously assigned to and the object it" + + " shall be assigned to now differ (was: " + + Constants.typeText[doi.getResourceTypeId()] + + ", trying to assign to " + + Constants.typeText[dso.getType()] + ").", + DOIIdentifierException.DOI_IS_DELETED); } else { // reassign doi // nothing to do here, doi will br reassigned after this // if-else-if-else-...-block + // will check if a filter prohibits creation of DOIs after this if-else-block } } else { // doi is assigned to a DSO; is it assigned to our specific dso? // check if DOI already belongs to dso if (dso.getID().equals(doi.getDSpaceObject().getID())) { + // Before we return this, check the filter + checkMintable(context, filter, dso); return doi; } else { throw new DOIIdentifierException("Trying to create a DOI " + - "that is already reserved for another object.", - DOIIdentifierException.DOI_ALREADY_EXISTS); + "that is already reserved for another object.", + DOIIdentifierException.DOI_ALREADY_EXISTS); } } } + // Check if this item is eligible for minting. An IdentifierNotApplicableException will be thrown if not. 
+ checkMintable(context, filter, dso); + // check prefix if (!doiIdentifier.startsWith(this.getPrefix() + "/")) { throw new DOIIdentifierException("Trying to create a DOI " + - "that's not part of our Namespace!", - DOIIdentifierException.FOREIGN_DOI); + "that's not part of our Namespace!", + DOIIdentifierException.FOREIGN_DOI); } if (doi == null) { // prepare new doiRow doi = doiService.create(context); } } else { - // We need to generate a new DOI. + // Check if this item is eligible for minting. An IdentifierNotApplicableException will be thrown if not. + checkMintable(context, filter, dso); + doi = doiService.create(context); doiIdentifier = this.getPrefix() + "/" + this.getNamespaceSeparator() + doi.getID(); @@ -741,11 +1012,11 @@ protected DOI loadOrCreateDOI(Context context, DSpaceObject dso, String doiIdent // prepare new doiRow doi.setDoi(doiIdentifier); doi.setDSpaceObject(dso); - doi.setStatus(null); + doi.setStatus(MINTED); try { doiService.update(context, doi); } catch (SQLException e) { - throw new RuntimeException("Cannot save DOI to databse for unkown reason."); + throw new RuntimeException("Cannot save DOI to database for unknown reason."); } return doi; @@ -758,18 +1029,16 @@ protected DOI loadOrCreateDOI(Context context, DSpaceObject dso, String doiIdent * @return The DOI or null if no DOI was found. 
* @throws DOIIdentifierException if identifier error */ - public String getDOIOutOfObject(DSpaceObject dso) - throws DOIIdentifierException { + public String getDOIOutOfObject(DSpaceObject dso) throws DOIIdentifierException { // FIXME if (!(dso instanceof Item)) { - throw new IllegalArgumentException("We currently support DOIs for " - + "Items only, not for " + contentServiceFactory - .getDSpaceObjectService(dso).getTypeText(dso) + "."); + throw new IllegalArgumentException("We currently support DOIs for Items only, not for " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + "."); } Item item = (Item) dso; List metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null); - String leftPart = DOI.RESOLVER + SLASH + getPrefix() + SLASH + getNamespaceSeparator(); + String leftPart = doiService.getResolver() + SLASH + getPrefix() + SLASH + getNamespaceSeparator(); for (MetadataValue id : metadata) { if (id.getValue().startsWith(leftPart)) { return doiService.DOIFromExternalFormat(id.getValue()); @@ -789,17 +1058,16 @@ public String getDOIOutOfObject(DSpaceObject dso) * @throws IdentifierException if identifier error */ protected void saveDOIToObject(Context context, DSpaceObject dso, String doi) - throws SQLException, AuthorizeException, IdentifierException { + throws SQLException, AuthorizeException, IdentifierException { // FIXME if (!(dso instanceof Item)) { - throw new IllegalArgumentException("We currently support DOIs for " - + "Items only, not for " + contentServiceFactory - .getDSpaceObjectService(dso).getTypeText(dso) + "."); + throw new IllegalArgumentException("We currently support DOIs for Items only, not for " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + "."); } Item item = (Item) dso; - itemService - .addMetadata(context, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null, doiService.DOIToExternalForm(doi)); + itemService.addMetadata(context, item, MD_SCHEMA, DOI_ELEMENT, 
DOI_QUALIFIER, null, + doiService.DOIToExternalForm(doi)); try { itemService.update(context, item); } catch (SQLException | AuthorizeException ex) { @@ -821,9 +1089,8 @@ protected void removeDOIFromObject(Context context, DSpaceObject dso, String doi throws AuthorizeException, SQLException, IdentifierException { // FIXME if (!(dso instanceof Item)) { - throw new IllegalArgumentException("We currently support DOIs for " - + "Items only, not for " + contentServiceFactory - .getDSpaceObjectService(dso).getTypeText(dso) + "."); + throw new IllegalArgumentException("We currently support DOIs for Items only, not for " + + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + "."); } Item item = (Item) dso; @@ -838,7 +1105,58 @@ protected void removeDOIFromObject(Context context, DSpaceObject dso, String doi itemService.clearMetadata(context, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null); itemService.addMetadata(context, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null, - remainder); + remainder); itemService.update(context, item); } -} + + /** + * Checks to see if an item can have a DOI minted, using the configured logical filter + * @param context + * @param filter Logical item filter to apply + * @param dso The item to be evaluated + * @throws DOIIdentifierNotApplicableException + */ + @Override + public void checkMintable(Context context, Filter filter, DSpaceObject dso) + throws DOIIdentifierNotApplicableException { + if (filter == null) { + Filter trueFilter = DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName( + "always_true_filter", TrueFilter.class); + // If a null filter was passed, and we have a good default filter to apply, apply it. 
+ // Otherwise, set to TrueFilter which means "no filtering" + if (this.filter != null) { + filter = this.filter; + } else { + filter = trueFilter; + } + } + // If the check fails, an exception will be thrown to be caught by the calling method + if (contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso).equals("ITEM")) { + try { + boolean result = filter.getResult(context, (Item) dso); + log.debug("Result of filter for " + dso.getHandle() + " is " + result); + if (!result) { + throw new DOIIdentifierNotApplicableException("Item " + dso.getHandle() + + " was evaluated as 'false' by the item filter, not minting"); + } + } catch (LogicalStatementException e) { + log.error("Error evaluating item with logical filter: " + e.getLocalizedMessage()); + throw new DOIIdentifierNotApplicableException(e); + } + } else { + log.debug("DOI Identifier Provider: filterService is null (ie. don't prevent DOI minting)"); + } + } + + /** + * Checks to see if an item can have a DOI minted, using the configured logical filter + * @param context + * @param dso The item to be evaluated + * @throws DOIIdentifierNotApplicableException + */ + @Override + public void checkMintable(Context context, DSpaceObject dso) throws DOIIdentifierNotApplicableException { + checkMintable(context, this.filter, dso); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java b/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java index aca933aab673..99643db33fa0 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DOIServiceImpl.java @@ -17,11 +17,13 @@ import org.dspace.identifier.dao.DOIDAO; import org.dspace.identifier.doi.DOIIdentifierException; import org.dspace.identifier.service.DOIService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; /** - * Service implementation for the 
DOI object. - * This class is responsible for all business logic calls for the DOI object and is autowired by spring. + * Service implementation for the {@link DOI} object. + * This class is responsible for all business logic calls for the DOI object + * and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -31,6 +33,16 @@ public class DOIServiceImpl implements DOIService { @Autowired(required = true) protected DOIDAO doiDAO; + @Autowired(required = true) + protected ConfigurationService configurationService; + + private static final Pattern DOI_URL_PATTERN + = Pattern.compile("http(s)?://([a-z0-9-.]+)?doi.org(?/.*)", + Pattern.CASE_INSENSITIVE); + private static final String DOI_URL_PATTERN_PATH_GROUP = "path"; + + private static final String RESOLVER_DEFAULT = "https://doi.org"; + protected DOIServiceImpl() { } @@ -66,25 +78,46 @@ public String DOIToExternalForm(String identifier) throws IdentifierException { if (null == identifier) { throw new IllegalArgumentException("Identifier is null.", new NullPointerException()); } + if (identifier.isEmpty()) { throw new IllegalArgumentException("Cannot format an empty identifier."); } - if (identifier.startsWith(DOI.SCHEME)) { - return DOI.RESOLVER + "/" + identifier.substring(DOI.SCHEME.length()); + + String resolver = getResolver(); + + if (identifier.startsWith(DOI.SCHEME)) { // doi:something + StringBuilder result = new StringBuilder(resolver); + if (!resolver.endsWith("/")) { + result.append('/'); + } + result.append(identifier.substring(DOI.SCHEME.length())); + return result.toString(); } - if (identifier.startsWith("10.") && identifier.contains("/")) { - return DOI.RESOLVER + "/" + identifier; + + if (identifier.startsWith("10.") && identifier.contains("/")) { // 10.something + StringBuilder result = new StringBuilder(resolver); + if (!resolver.endsWith("/")) { + result.append('/'); + } + result.append(identifier); + return result.toString(); } - if 
(identifier.startsWith(DOI.RESOLVER + "/10.")) { + + if (identifier.startsWith(resolver + "/10.")) { // https://doi.org/10.something return identifier; } + Matcher matcher = DOI_URL_PATTERN.matcher(identifier); + if (matcher.matches()) { // various old URL forms + return resolver + matcher.group(DOI_URL_PATTERN_PATH_GROUP); + } + throw new IdentifierException(identifier + "does not seem to be a DOI."); } @Override public String DOIFromExternalFormat(String identifier) throws DOIIdentifierException { - Pattern pattern = Pattern.compile("^" + DOI.RESOLVER + "/+(10\\..*)$"); + Pattern pattern = Pattern.compile("^" + getResolver() + "/+(10\\..*)$"); Matcher matcher = pattern.matcher(identifier); if (matcher.find()) { return DOI.SCHEME + matcher.group(1); @@ -99,18 +132,29 @@ public String formatIdentifier(String identifier) throws DOIIdentifierException if (null == identifier) { throw new IllegalArgumentException("Identifier is null.", new NullPointerException()); } - if (identifier.startsWith(DOI.SCHEME)) { - return identifier; - } + if (identifier.isEmpty()) { throw new IllegalArgumentException("Cannot format an empty identifier."); } - if (identifier.startsWith("10.") && identifier.contains("/")) { + + if (identifier.startsWith(DOI.SCHEME)) { // doi:something + return identifier; + } + + if (identifier.startsWith("10.") && identifier.contains("/")) { // 10.something return DOI.SCHEME + identifier; } - if (identifier.startsWith(DOI.RESOLVER + "/10.")) { - return DOI.SCHEME + identifier.substring(18); + + String resolver = getResolver(); + if (identifier.startsWith(resolver + "/10.")) { //https://doi.org/10.something + return DOI.SCHEME + identifier.substring(resolver.length()); } + + Matcher matcher = DOI_URL_PATTERN.matcher(identifier); + if (matcher.matches()) { // various old URL forms + return DOI.SCHEME + matcher.group(DOI_URL_PATTERN_PATH_GROUP).substring(1); + } + throw new DOIIdentifierException(identifier + "does not seem to be a DOI.", 
DOIIdentifierException.UNRECOGNIZED); } @@ -126,4 +170,14 @@ public List getSimilarDOIsNotInState(Context context, String doiPattern, Li throws SQLException { return doiDAO.findSimilarNotInState(context, doiPattern, statuses, dsoIsNotNull); } + + @Override + public String getResolver() { + String resolver = configurationService.getProperty("identifier.doi.resolver", + RESOLVER_DEFAULT); + if (resolver.endsWith("/")) { + resolver = resolver.substring(0, resolver.length() - 1); + } + return resolver; + } } diff --git a/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java b/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java index 0ea25ff3a48a..ae2cd248d417 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java +++ b/dspace-api/src/main/java/org/dspace/identifier/DataCiteXMLCreator.java @@ -23,8 +23,8 @@ import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.ConfigurationService; import org.dspace.utils.DSpace; -import org.jdom.Element; -import org.jdom.output.XMLOutputter; +import org.jdom2.Element; +import org.jdom2.output.XMLOutputter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dspace-api/src/main/java/org/dspace/identifier/FilteredIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/FilteredIdentifierProvider.java new file mode 100644 index 000000000000..c2254fa9a6fd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/identifier/FilteredIdentifierProvider.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.identifier; + +import java.sql.SQLException; + +import org.dspace.content.DSpaceObject; +import org.dspace.content.logic.Filter; +import org.dspace.content.logic.TrueFilter; +import org.dspace.core.Context; 
+import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * This abstract class adds extra method signatures so that implementing IdentifierProviders can + * handle "skip filter" booleans, so that any configured filters can be skipped and DOI registration forced. + * + * @author Kim Shepherd + * @version $Revision$ + */ +public abstract class FilteredIdentifierProvider extends IdentifierProvider { + + protected Filter filter = DSpaceServicesFactory.getInstance() + .getServiceManager().getServiceByName("always_true_filter", TrueFilter.class); + + /** + * Setter for spring to set the default filter from the property in configuration XML + * @param filter - an object implementing the org.dspace.content.logic.Filter interface + */ + public void setFilter(Filter filter) { + if (filter != null) { + this.filter = filter; + } + } + + /** + * Register a new identifier for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject to use for identifier registration + * @param filter - Logical item filter to determine whether this identifier should be registered + * @return identifier + * @throws IdentifierException + */ + public abstract String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException; + + /** + * Register a specified identifier for a given DSpaceObject + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new identifier + * @param identifier - String containing the identifier to register + * @param filter - Logical item filter to determine whether this identifier should be registered + * @throws IdentifierException + */ + public abstract void register(Context context, DSpaceObject dso, String identifier, Filter filter) + throws IdentifierException; + + /** + * Reserve a specified identifier for a given DSpaceObject (eg. 
reserving a DOI online with a registration agency) + * @param context - DSpace context + * @param dso - DSpaceObject identified by this identifier + * @param identifier - String containing the identifier to reserve + * @param filter - Logical item filter to determine whether this identifier should be reserved + * @throws IdentifierException + * @throws IllegalArgumentException + * @throws SQLException + */ + public abstract void reserve(Context context, DSpaceObject dso, String identifier, Filter filter) + throws IdentifierException, IllegalArgumentException, SQLException; + + /** + * Mint a new identifier in DSpace - this is usually the first step of registration + * @param context - DSpace context + * @param dso - DSpaceObject identified by the new identifier + * @param filter - Logical item filter to determine whether this identifier should be registered + * @return a String containing the new identifier + * @throws IdentifierException + */ + public abstract String mint(Context context, DSpaceObject dso, Filter filter) throws IdentifierException; + + /** + * Check configured item filters to see if this identifier is allowed to be minted + * @param context - DSpace context + * @param dso - DSpaceObject to be inspected + * @throws IdentifierException + */ + public abstract void checkMintable(Context context, DSpaceObject dso) throws IdentifierException; + + /** + * Check configured item filters to see if this identifier is allowed to be minted + * @param context - DSpace context + * @param filter - Logical item filter + * @param dso - DSpaceObject to be inspected + * @throws IdentifierException + */ + public abstract void checkMintable(Context context, Filter filter, DSpaceObject dso) throws IdentifierException; + +} diff --git a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java index 7b55a2dd8ca6..82358362da85 100644 --- 
a/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/HandleIdentifierProvider.java @@ -22,7 +22,7 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -68,16 +68,16 @@ public String register(Context context, DSpaceObject dso) { try { String id = mint(context, dso); - // move canonical to point the latest version + // Populate metadata if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { - Item item = (Item) dso; - populateHandleMetadata(context, item, id); + populateHandleMetadata(context, dso, id); } return id; } catch (IOException | SQLException | AuthorizeException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID(), e); } } @@ -87,12 +87,12 @@ public void register(Context context, DSpaceObject dso, String identifier) { try { handleService.createHandle(context, dso, identifier); if (dso instanceof Item || dso instanceof Collection || dso instanceof Community) { - Item item = (Item) dso; - populateHandleMetadata(context, item, identifier); + populateHandleMetadata(context, dso, identifier); } } catch (IOException | IllegalStateException | SQLException | AuthorizeException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while 
attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID(), e); } } @@ -103,8 +103,9 @@ public void reserve(Context context, DSpaceObject dso, String identifier) { try { handleService.createHandle(context, dso, identifier); } catch (IllegalStateException | SQLException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID()); } } @@ -126,8 +127,9 @@ public String mint(Context context, DSpaceObject dso) { try { return handleService.createHandle(context, dso); } catch (SQLException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID()); } } @@ -139,7 +141,7 @@ public DSpaceObject resolve(Context context, String identifier, String... 
attrib identifier = handleService.parseHandle(identifier); return handleService.resolveToObject(context, identifier); } catch (IllegalStateException | SQLException e) { - log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), + log.error(LogHelper.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), e); } // throw new IllegalStateException("Unsupported Handle Type " diff --git a/dspace-api/src/main/java/org/dspace/identifier/IdentifierNotApplicableException.java b/dspace-api/src/main/java/org/dspace/identifier/IdentifierNotApplicableException.java new file mode 100644 index 000000000000..6708dc8013a0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/identifier/IdentifierNotApplicableException.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.identifier; + +/** + * + * Thrown when an identifier should not be applied to an item, eg. 
when it has been filtered by an item filter + * + * + * @author Kim Shepherd + */ +public class IdentifierNotApplicableException extends IdentifierException { + + public IdentifierNotApplicableException() { + super(); + } + + public IdentifierNotApplicableException(String message) { + super(message); + } + + public IdentifierNotApplicableException(String message, Throwable cause) { + super(message, cause); + } + + public IdentifierNotApplicableException(Throwable cause) { + super(cause); + } +} diff --git a/dspace-api/src/main/java/org/dspace/identifier/IdentifierServiceImpl.java b/dspace-api/src/main/java/org/dspace/identifier/IdentifierServiceImpl.java index fd68bce882d8..b98aea24fa08 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/IdentifierServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/identifier/IdentifierServiceImpl.java @@ -10,6 +10,7 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -17,6 +18,7 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.logic.Filter; import org.dspace.core.Context; import org.dspace.handle.service.HandleService; import org.dspace.identifier.service.IdentifierService; @@ -44,7 +46,6 @@ public class IdentifierServiceImpl implements IdentifierService { protected HandleService handleService; protected IdentifierServiceImpl() { - } @Autowired(required = true) @@ -66,9 +67,13 @@ public void setProviders(List providers) { public void reserve(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException { for (IdentifierProvider service : providers) { - String identifier = service.mint(context, dso); - if (!StringUtils.isEmpty(identifier)) { - service.reserve(context, dso, identifier); + try { + String identifier = 
service.mint(context, dso); + if (!StringUtils.isEmpty(identifier)) { + service.reserve(context, dso, identifier); + } + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not reserved (inapplicable): " + e.getMessage()); } } //Update our item @@ -81,7 +86,11 @@ public void reserve(Context context, DSpaceObject dso, String identifier) // Next resolve all other services for (IdentifierProvider service : providers) { if (service.supports(identifier)) { - service.reserve(context, dso, identifier); + try { + service.reserve(context, dso, identifier); + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not reserved (inapplicable): " + e.getMessage()); + } } } //Update our item @@ -90,33 +99,129 @@ public void reserve(Context context, DSpaceObject dso, String identifier) @Override public void register(Context context, DSpaceObject dso) - throws AuthorizeException, SQLException, IdentifierException { + throws AuthorizeException, SQLException, IdentifierException { //We need to commit our context because one of the providers might require the handle created above // Next resolve all other services for (IdentifierProvider service : providers) { - service.register(context, dso); + try { + service.register(context, dso); + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not registered (inapplicable): " + e.getMessage()); + } } //Update our item / collection / community contentServiceFactory.getDSpaceObjectService(dso).update(context, dso); } + @Override + public void register(Context context, DSpaceObject dso, Class<? extends Identifier> type, Filter filter) + throws AuthorizeException, SQLException, IdentifierException { + boolean registered = false; + // Iterate all services and register identifiers as appropriate + for (IdentifierProvider service : providers) { + if (service.supports(type)) { + try { + if (service instanceof FilteredIdentifierProvider) { + FilteredIdentifierProvider filteredService = 
(FilteredIdentifierProvider)service; + filteredService.register(context, dso, filter); + } else { + service.register(context, dso); + } + registered = true; + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not registered (inapplicable): " + e.getMessage()); + } + } + } + if (!registered) { + throw new IdentifierException("Cannot register identifier: Didn't " + + "find a provider that supports this identifier."); + } + // Update our item / collection / community + contentServiceFactory.getDSpaceObjectService(dso).update(context, dso); + } + + @Override + public void register(Context context, DSpaceObject dso, Class<? extends Identifier> type) + throws AuthorizeException, SQLException, IdentifierException { + boolean registered = false; + // Iterate all services and register identifiers as appropriate + for (IdentifierProvider service : providers) { + if (service.supports(type)) { + try { + service.register(context, dso); + registered = true; + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not registered (inapplicable): " + e.getMessage()); + } + } + } + if (!registered) { + throw new IdentifierException("Cannot register identifier: Didn't " + + "find a provider that supports this identifier."); + } + // Update our item / collection / community + contentServiceFactory.getDSpaceObjectService(dso).update(context, dso); + } + + @Override + public void register(Context context, DSpaceObject dso, Map<Class<? extends Identifier>, Filter> typeFilters) + throws AuthorizeException, SQLException, IdentifierException { + // Iterate all services and register identifiers as appropriate + for (IdentifierProvider service : providers) { + try { + // If the service supports filtering, look through the map and the first supported class + // we find, set the filter and break. If no filter was seen for this type, just let the provider + // use its own implementation. 
+ if (service instanceof FilteredIdentifierProvider) { + FilteredIdentifierProvider filteredService = (FilteredIdentifierProvider)service; + Filter filter = null; + for (Class type : typeFilters.keySet()) { + if (filteredService.supports(type)) { + filter = typeFilters.get(type); + break; + } + } + if (filter != null) { + // Pass the found filter to the provider + filteredService.register(context, dso, filter); + } else { + // Let the provider use the default filter / behaviour + filteredService.register(context, dso); + } + } else { + service.register(context, dso); + } + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not registered (inapplicable): " + e.getMessage()); + } + } + // Update our item / collection / community + contentServiceFactory.getDSpaceObjectService(dso).update(context, dso); + } + + + @Override public void register(Context context, DSpaceObject object, String identifier) throws AuthorizeException, SQLException, IdentifierException { - //We need to commit our context because one of the providers might require the handle created above - // Next resolve all other services + // Iterate all services and register identifiers as appropriate boolean registered = false; for (IdentifierProvider service : providers) { if (service.supports(identifier)) { - service.register(context, object, identifier); - registered = true; + try { + service.register(context, object, identifier); + registered = true; + } catch (IdentifierNotApplicableException e) { + log.warn("Identifier not registered (inapplicable): " + e.getMessage()); + } } } if (!registered) { throw new IdentifierException("Cannot register identifier: Didn't " + "find a provider that supports this identifier."); } - //Update our item / collection / community + // pdate our item / collection / community contentServiceFactory.getDSpaceObjectService(object).update(context, object); } @@ -134,9 +239,8 @@ public String lookup(Context context, DSpaceObject dso, Class lookup(Context 
context, DSpaceObject dso) { List identifiers = new ArrayList<>(); + // Attempt to lookup DSO's identifiers using every available provider + // TODO: We may want to eventually limit providers based on DSO type, as not every provider supports every DSO for (IdentifierProvider service : providers) { try { String result = service.lookup(context, dso); if (!StringUtils.isEmpty(result)) { if (log.isDebugEnabled()) { try { - log.debug("Got an identifier from " - + service.getClass().getCanonicalName() + "."); + log.debug("Got an identifier from " + service.getClass().getCanonicalName() + "."); } catch (NullPointerException ex) { log.debug(ex.getMessage(), ex); } @@ -162,13 +267,14 @@ public List lookup(Context context, DSpaceObject dso) { identifiers.add(result); } } catch (IdentifierNotFoundException ex) { - log.info(service.getClass().getName() + " doesn't find an " + // This IdentifierNotFoundException is NOT logged by default, as some providers do not apply to + // every DSO (e.g. DOIs usually don't apply to EPerson objects). So it is expected some may fail lookup. 
+ log.debug(service.getClass().getName() + " doesn't find an " + "Identifier for " + contentServiceFactory.getDSpaceObjectService(dso) .getTypeText(dso) + ", " + dso.getID().toString() + "."); - log.debug(ex.getMessage(), ex); } catch (IdentifierException ex) { - log.error(ex.getMessage(), ex); + log.error(ex); } } @@ -176,9 +282,9 @@ public List lookup(Context context, DSpaceObject dso) { String handle = dso.getHandle(); if (!StringUtils.isEmpty(handle)) { if (!identifiers.contains(handle) - && !identifiers.contains("hdl:" + handle) - && !identifiers.contains(handleService.getCanonicalForm(handle))) { - // The VerionedHandleIdentifierProvider gets loaded by default + && !identifiers.contains("hdl:" + handle) + && !identifiers.contains(handleService.getCanonicalForm(handle))) { + // The VersionedHandleIdentifierProvider gets loaded by default // it returns handles without any scheme (neither hdl: nor http:). // If the VersionedHandleIdentifierProvider is not loaded, // we adds the handle in way it would. 
@@ -213,7 +319,6 @@ public DSpaceObject resolve(Context context, String identifier) log.info(service.getClass().getName() + " cannot resolve " + "Identifier " + identifier + ": identifier not " + "found."); - log.debug(ex.getMessage(), ex); } catch (IdentifierException ex) { log.error(ex.getMessage(), ex); } @@ -224,8 +329,7 @@ public DSpaceObject resolve(Context context, String identifier) } @Override - public void delete(Context context, DSpaceObject dso) - throws AuthorizeException, SQLException, IdentifierException { + public void delete(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException { for (IdentifierProvider service : providers) { try { service.delete(context, dso); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 5464a0216e53..e5a90907c7b6 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -18,6 +18,7 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.MetadataValue; +import org.dspace.content.logic.Filter; import org.dspace.core.Context; import org.dspace.identifier.doi.DOIConnector; import org.dspace.identifier.doi.DOIIdentifierException; @@ -26,13 +27,14 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author Marsa Haoua * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ -public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { +public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider 
implements InitializingBean { /** * log4j category */ @@ -48,8 +50,26 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Autowired(required = true) protected VersionHistoryService versionHistoryService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ @Override - public String mint(Context context, DSpaceObject dso) + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + + @Override + public String mint(Context context, DSpaceObject dso) throws IdentifierException { + return mint(context, dso, this.filter); + } + + @Override + public String mint(Context context, DSpaceObject dso, Filter filter) throws IdentifierException { if (!(dso instanceof Item)) { throw new IdentifierException("Currently only Items are supported for DOIs."); @@ -60,7 +80,7 @@ public String mint(Context context, DSpaceObject dso) try { history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { - throw new RuntimeException("A problem occured while accessing the database.", ex); + throw new RuntimeException("A problem occurred while accessing the database.", ex); } String doi = null; @@ -70,7 +90,7 @@ public String mint(Context context, DSpaceObject dso) return doi; } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " + log.error("Error while attempting to retrieve information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " @@ -79,6 +99,9 @@ public String mint(Context context, DSpaceObject dso) + " with ID " + dso.getID() + 
".", ex); } + // Make a call to the filter here to throw an exception instead of carrying on with removal + creation + checkMintable(context, filter, dso); + // check whether we have a DOI in the metadata and if we have to remove it String metadataDOI = getDOIOutOfObject(dso); if (metadataDOI != null) { @@ -111,7 +134,7 @@ public String mint(Context context, DSpaceObject dso) // ensure DOI exists in our database as well and return. // this also checks that the doi is not assigned to another dso already. try { - loadOrCreateDOI(context, dso, versionedDOI); + loadOrCreateDOI(context, dso, versionedDOI, filter); } catch (SQLException ex) { log.error( "A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex); @@ -125,9 +148,9 @@ public String mint(Context context, DSpaceObject dso) if (history != null) { // versioning is currently supported for items only // if we have a history, we have a item - doi = makeIdentifierBasedOnHistory(context, dso, history); + doi = makeIdentifierBasedOnHistory(context, dso, history, filter); } else { - doi = loadOrCreateDOI(context, dso, null).getDoi(); + doi = loadOrCreateDOI(context, dso, null, filter).getDoi(); } } catch (SQLException ex) { log.error("SQLException while creating a new DOI: ", ex); @@ -136,11 +159,31 @@ public String mint(Context context, DSpaceObject dso) log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } + return doi.startsWith(DOI.SCHEME) ? 
doi : DOI.SCHEME + doi; + } + + @Override + public void register(Context context, DSpaceObject dso, String identifier) throws IdentifierException { + register(context, dso, identifier, this.filter); + } + + @Override + public String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException { + if (!(dso instanceof Item)) { + // DOIs are currently assigned only to Items + return null; + } + + String doi = mint(context, dso, filter); + + register(context, dso, doi, filter); + return doi; } @Override - public void register(Context context, DSpaceObject dso, String identifier) + public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { if (!(dso instanceof Item)) { throw new IdentifierException("Currently only Items are supported for DOIs."); @@ -148,7 +191,7 @@ public void register(Context context, DSpaceObject dso, String identifier) Item item = (Item) dso; if (StringUtils.isEmpty(identifier)) { - identifier = mint(context, dso); + identifier = mint(context, dso, filter); } String doiIdentifier = doiService.formatIdentifier(identifier); @@ -156,10 +199,10 @@ public void register(Context context, DSpaceObject dso, String identifier) // search DOI in our db try { - doi = loadOrCreateDOI(context, dso, doiIdentifier); + doi = loadOrCreateDOI(context, dso, doiIdentifier, filter); } catch (SQLException ex) { - log.error("Error in databse connection: " + ex.getMessage(), ex); - throw new RuntimeException("Error in database conncetion.", ex); + log.error("Error in database connection: " + ex.getMessage(), ex); + throw new RuntimeException("Error in database connection.", ex); } if (DELETED.equals(doi.getStatus()) || @@ -220,9 +263,15 @@ protected String getDOIPostfix(String identifier) return doiPostfix; } - // Should never return null! 
protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, VersionHistory history) - throws AuthorizeException, SQLException, DOIIdentifierException { + throws AuthorizeException, SQLException, DOIIdentifierException, IdentifierNotApplicableException { + return makeIdentifierBasedOnHistory(context, dso, history, this.filter); + } + + // Should never return null! + protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, VersionHistory history, + Filter filter) + throws AuthorizeException, SQLException, DOIIdentifierException, IdentifierNotApplicableException { // Mint foreach new version an identifier like: 12345/100.versionNumber // use the bare handle (g.e. 12345/100) for the first version. @@ -244,6 +293,9 @@ protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, } if (previousVersionDOI == null) { + // Before continuing with any new DOI creation, apply the filter + checkMintable(context, filter, dso); + // We need to generate a new DOI. 
DOI doi = doiService.create(context); @@ -261,7 +313,6 @@ protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, doiService.update(context, doi); return doi.getDoi(); } - assert (previousVersionDOI != null); String identifier = getBareDOI(previousVersionDOI); @@ -270,7 +321,7 @@ protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, String.valueOf(versionHistoryService.getVersion(context, history, item).getVersionNumber())); } - loadOrCreateDOI(context, dso, identifier); + loadOrCreateDOI(context, dso, identifier, filter); return identifier; } diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java index 193f74789506..4f9efd220695 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java @@ -27,7 +27,7 @@ import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -35,6 +35,7 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -45,7 +46,7 @@ * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ @Component -public class VersionedHandleIdentifierProvider extends IdentifierProvider { +public class VersionedHandleIdentifierProvider extends IdentifierProvider implements 
InitializingBean { /** * log4j category */ @@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider { @Autowired(required = true) protected ContentServiceFactory contentServiceFactory; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); @@ -89,7 +103,7 @@ public String register(Context context, DSpaceObject dso) { populateHandleMetadata(context, dso, id); } } catch (IOException | SQLException | AuthorizeException e) { - log.error(LogManager.getHeader(context, "Error while attempting to create handle", + log.error(LogHelper.getHeader(context, "Error while attempting to create handle", "Item id: " + (dso != null ? dso.getID() : "")), e); throw new RuntimeException( "Error while attempting to create identifier for Item id: " + (dso != null ? 
dso.getID() : "")); @@ -240,8 +254,9 @@ public void reserve(Context context, DSpaceObject dso, String identifier) { try { handleService.createHandle(context, dso, identifier); } catch (IllegalStateException | SQLException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID()); } } @@ -274,8 +289,9 @@ public String mint(Context context, DSpaceObject dso) { } return handleId; } catch (SQLException | AuthorizeException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID()); } } @@ -287,7 +303,7 @@ public DSpaceObject resolve(Context context, String identifier, String... 
attrib identifier = handleService.parseHandle(identifier); return handleService.resolveToObject(context, identifier); } catch (IllegalStateException | SQLException e) { - log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), + log.error(LogHelper.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), e); } return null; diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index e15abc43b4bd..9993f78b4dd5 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -22,7 +22,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.handle.service.HandleService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -30,6 +30,7 @@ import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,7 +40,8 @@ * @author Ben Bosman (ben at atmire dot com) */ @Component -public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider { +public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider + implements InitializingBean { /** * log4j category */ @@ -65,6 +67,19 @@ public class 
VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); @@ -80,11 +95,11 @@ public String register(Context context, DSpaceObject dso) { String id = mint(context, dso); // move canonical to point the latest version - if (dso != null && dso.getType() == Constants.ITEM) { + if (dso.getType() == Constants.ITEM && dso instanceof Item) { Item item = (Item) dso; - VersionHistory history = null; + VersionHistory history; try { - history = versionHistoryService.findByItem(context, (Item) dso); + history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { throw new RuntimeException("A problem with the database connection occured.", ex); } @@ -117,7 +132,7 @@ public String register(Context context, DSpaceObject dso) { // check if we have a previous item if (previous != null) { try { - // If we have a reviewer he/she might not have the + // If we have a reviewer they might not have the // rights to edit the metadata of thes previous item. 
// Temporarly grant them: context.turnOffAuthorisationSystem(); @@ -165,49 +180,51 @@ public String register(Context context, DSpaceObject dso) { @Override public void register(Context context, DSpaceObject dso, String identifier) { try { - - Item item = (Item) dso; - - // if for this identifier is already present a record in the Handle table and the corresponding item - // has an history someone is trying to restore the latest version for the item. When - // trying to restore the latest version the identifier in input doesn't have the for 1234/123.latestVersion - // it is the canonical 1234/123 - VersionHistory itemHistory = getHistory(context, identifier); - if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { - - int newVersionNumber = versionHistoryService.getLatestVersion(context, itemHistory) - .getVersionNumber() + 1; - String canonical = identifier; - identifier = identifier.concat(".").concat("" + newVersionNumber); - restoreItAsVersion(context, dso, identifier, item, canonical, itemHistory); - } else if (identifier.matches(".*/.*\\.\\d+")) { - // if identifier == 1234.5/100.4 reinstate the version 4 in the version table if absent - - // if it is a version of an item is needed to put back the record - // in the versionitem table - String canonical = getCanonical(identifier); - DSpaceObject canonicalItem = this.resolve(context, canonical); - if (canonicalItem == null) { - restoreItAsCanonical(context, dso, identifier, item, canonical); - } else { - VersionHistory history = versionHistoryService.findByItem(context, (Item) canonicalItem); - if (history == null) { + if (dso instanceof Item) { + Item item = (Item) dso; + // if this identifier is already present in the Handle table and the corresponding item + // has a history, then someone is trying to restore the latest version for the item. When + // trying to restore the latest version, the identifier in input doesn't have the + // 1234/123.latestVersion. 
Instead, it is the canonical 1234/123 + VersionHistory itemHistory = getHistory(context, identifier); + if (!identifier.matches(".*/.*\\.\\d+") && itemHistory != null) { + + int newVersionNumber = versionHistoryService.getLatestVersion(context, itemHistory) + .getVersionNumber() + 1; + String canonical = identifier; + identifier = identifier.concat(".").concat("" + newVersionNumber); + restoreItAsVersion(context, dso, identifier, item, canonical, itemHistory); + } else if (identifier.matches(".*/.*\\.\\d+")) { + // if identifier == 1234.5/100.4 reinstate the version 4 in the version table if absent + + // if it is a version of an item is needed to put back the record + // in the versionitem table + String canonical = getCanonical(identifier); + DSpaceObject canonicalItem = this.resolve(context, canonical); + if (canonicalItem == null) { restoreItAsCanonical(context, dso, identifier, item, canonical); } else { - restoreItAsVersion(context, dso, identifier, item, canonical, history); + VersionHistory history = versionHistoryService.findByItem(context, (Item) canonicalItem); + if (history == null) { + restoreItAsCanonical(context, dso, identifier, item, canonical); + } else { + restoreItAsVersion(context, dso, identifier, item, canonical, history); + } } + } else { + // A regular handle to create for an Item + createNewIdentifier(context, dso, identifier); + modifyHandleMetadata(context, item, getCanonical(identifier)); } } else { - //A regular handle + // Handle being registered for a different type of object (e.g. 
Collection or Community) createNewIdentifier(context, dso, identifier); - if (dso instanceof Item) { - modifyHandleMetadata(context, item, getCanonical(identifier)); - } } } catch (IOException | SQLException | AuthorizeException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID(), e); } } @@ -259,8 +276,9 @@ public void reserve(Context context, DSpaceObject dso, String identifier) { try { handleService.createHandle(context, dso, identifier); } catch (IllegalStateException | SQLException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID()); } } @@ -293,8 +311,9 @@ public String mint(Context context, DSpaceObject dso) { } return handleId; } catch (SQLException | AuthorizeException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to create handle", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to create handle", + "Item id: " + dso.getID()), e); throw new RuntimeException("Error while attempting to create identifier for Item id: " + dso.getID()); } } @@ -303,9 +322,10 @@ public String mint(Context context, DSpaceObject dso) { public DSpaceObject resolve(Context context, String identifier, String... 
attributes) { // We can do nothing with this, return null try { + identifier = handleService.parseHandle(identifier); return handleService.resolveToObject(context, identifier); } catch (IllegalStateException | SQLException e) { - log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), + log.error(LogHelper.getHeader(context, "Error while resolving handle to item", "handle: " + identifier), e); } return null; @@ -353,8 +373,9 @@ public void delete(Context context, DSpaceObject dso) throws IdentifierException } } } catch (RuntimeException | SQLException e) { - log.error( - LogManager.getHeader(context, "Error while attempting to register doi", "Item id: " + dso.getID()), e); + log.error(LogHelper.getHeader(context, + "Error while attempting to register doi", + "Item id: " + dso.getID()), e); throw new IdentifierException("Error while moving doi identifier", e); } @@ -422,6 +443,19 @@ protected String makeIdentifierBasedOnHistory(Context context, DSpaceObject dso, } } + DSpaceObject itemWithCanonicalHandle = handleService.resolveToObject(context, canonical); + if (itemWithCanonicalHandle != null) { + if (itemWithCanonicalHandle.getID() != previous.getItem().getID()) { + log.warn("The previous version's item (" + previous.getItem().getID() + + ") does not match with the item containing handle " + canonical + + " (" + itemWithCanonicalHandle.getID() + ")"); + } + // Move the original handle from whatever item it's on to the newest version + handleService.modifyHandleDSpaceObject(context, canonical, dso); + } else { + handleService.createHandle(context, dso, canonical); + } + // add a new Identifier for this item: 12345/100.x String idNew = canonical + DOT + version.getVersionNumber(); //Make sure we don't have an old handle hanging around (if our previous version was deleted in the workspace) diff --git a/dspace-api/src/main/java/org/dspace/identifier/dao/impl/DOIDAOImpl.java 
b/dspace-api/src/main/java/org/dspace/identifier/dao/impl/DOIDAOImpl.java index 019e89c12974..784fec1d8894 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/dao/impl/DOIDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/identifier/dao/impl/DOIDAOImpl.java @@ -8,7 +8,7 @@ package org.dspace.identifier.dao.impl; import java.sql.SQLException; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; @@ -24,7 +24,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the DOI object. - * This class is responsible for all database calls for the DOI object and is autowired by spring + * This class is responsible for all database calls for the DOI object and is autowired by Spring. * This class should never be accessed directly. * * @author kevinvandevelde at atmire.com @@ -41,7 +41,7 @@ public DOI findByDoi(Context context, String doi) throws SQLException { Root doiRoot = criteriaQuery.from(DOI.class); criteriaQuery.select(doiRoot); criteriaQuery.where(criteriaBuilder.equal(doiRoot.get(DOI_.doi), doi)); - return uniqueResult(context, criteriaQuery, false, DOI.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, DOI.class); } @Override @@ -52,7 +52,7 @@ public DOI findDOIByDSpaceObject(Context context, DSpaceObject dso, List doiRoot = criteriaQuery.from(DOI.class); criteriaQuery.select(doiRoot); - List listToIncludeInOrPredicate = new LinkedList<>(); + List listToIncludeInOrPredicate = new ArrayList<>(statusToExclude.size() + 1); for (Integer status : statusToExclude) { listToIncludeInOrPredicate.add(criteriaBuilder.notEqual(doiRoot.get(DOI_.status), status)); @@ -75,7 +75,7 @@ public List findByStatus(Context context, List statuses) throws SQ CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, DOI.class); Root doiRoot = criteriaQuery.from(DOI.class); 
criteriaQuery.select(doiRoot); - List orPredicates = new LinkedList<>(); + List orPredicates = new ArrayList<>(statuses.size()); for (Integer status : statuses) { orPredicates.add(criteriaBuilder.equal(doiRoot.get(DOI_.status), status)); } @@ -92,13 +92,13 @@ public List findSimilarNotInState(Context context, String doi, List doiRoot = criteriaQuery.from(DOI.class); criteriaQuery.select(doiRoot); - List listToIncludeInOrPredicate = new LinkedList<>(); + List listToIncludeInOrPredicate = new ArrayList<>(excludedStatuses.size()); for (Integer status : excludedStatuses) { listToIncludeInOrPredicate.add(criteriaBuilder.notEqual(doiRoot.get(DOI_.status), status)); } - List listToIncludeInAndPredicate = new LinkedList<>(); + List listToIncludeInAndPredicate = new ArrayList<>(); listToIncludeInAndPredicate.add(criteriaBuilder.like(doiRoot.get(DOI_.doi), doi)); listToIncludeInAndPredicate.add(criteriaBuilder.or(listToIncludeInOrPredicate.toArray(new Predicate[] {}))); @@ -107,8 +107,6 @@ public List findSimilarNotInState(Context context, String doi, List dois = doiService .getDOIsByStatus(context, Arrays.asList(DOIIdentifierProvider.TO_BE_RESERVED)); @@ -214,7 +237,6 @@ public static void runCLI(Context context, DOIOrganiser organiser, String[] args } if (line.hasOption('r')) { - try { List dois = doiService .getDOIsByStatus(context, Arrays.asList(DOIIdentifierProvider.TO_BE_REGISTERED)); @@ -229,11 +251,12 @@ public static void runCLI(Context context, DOIOrganiser organiser, String[] args } catch (SQLException ex) { System.err.println("Error in database connection:" + ex.getMessage()); ex.printStackTrace(System.err); + } catch (DOIIdentifierException ex) { + System.err.println("Error registering DOI identifier:" + ex.getMessage()); } } if (line.hasOption('u')) { - try { List dois = doiService.getDOIsByStatus(context, Arrays.asList( DOIIdentifierProvider.UPDATE_BEFORE_REGISTRATION, @@ -255,7 +278,6 @@ public static void runCLI(Context context, DOIOrganiser organiser, 
String[] args } if (line.hasOption('d')) { - try { List dois = doiService .getDOIsByStatus(context, Arrays.asList(DOIIdentifierProvider.TO_BE_DELETED)); @@ -277,7 +299,6 @@ public static void runCLI(Context context, DOIOrganiser organiser, String[] args } } - if (line.hasOption("reserve-doi")) { String identifier = line.getOptionValue("reserve-doi"); @@ -339,7 +360,14 @@ public static void runCLI(Context context, DOIOrganiser organiser, String[] args } - public void list(String processName, PrintStream out, PrintStream err, Integer... status) { + /** + * list DOIs queued for reservation or registration + * @param processName - process name for display + * @param out - output stream (eg. STDOUT) + * @param err - error output stream (eg. STDERR) + * @param status - status codes + */ + public void list(String processName, PrintStream out, PrintStream err, Integer ... status) { String indent = " "; if (null == out) { out = System.out; @@ -371,15 +399,21 @@ public void list(String processName, PrintStream out, PrintStream err, Integer.. 
} } - public void register(DOI doiRow) throws SQLException { + /** + * Register DOI with the provider + * @param doiRow - doi to register + * @param filter - logical item filter to override + * @throws SQLException + * @throws DOIIdentifierException + */ + public void register(DOI doiRow, Filter filter) throws SQLException, DOIIdentifierException { DSpaceObject dso = doiRow.getDSpaceObject(); if (Constants.ITEM != dso.getType()) { throw new IllegalArgumentException("Currenty DSpace supports DOIs for Items only."); } try { - provider.registerOnline(context, dso, - DOI.SCHEME + doiRow.getDoi()); + provider.registerOnline(context, dso, DOI.SCHEME + doiRow.getDoi(), filter); if (!quiet) { System.out.println("This identifier: " @@ -438,20 +472,43 @@ public void register(DOI doiRow) throws SQLException { } } - public void reserve(DOI doiRow) throws SQLException { + /** + * Register DOI with the provider + * @param doiRow - doi to register + * @throws SQLException + * @throws DOIIdentifierException + */ + public void register(DOI doiRow) throws SQLException, DOIIdentifierException { + register(doiRow, this.filter); + } + + /** + * Reserve DOI with the provider, + * @param doiRow - doi to reserve + * @throws SQLException + * @throws DOIIdentifierException + */ + public void reserve(DOI doiRow) { + reserve(doiRow, this.filter); + } + + /** + * Reserve DOI with the provider + * @param doiRow - doi to reserve + * @throws SQLException + * @throws DOIIdentifierException + */ + public void reserve(DOI doiRow, Filter filter) { DSpaceObject dso = doiRow.getDSpaceObject(); if (Constants.ITEM != dso.getType()) { - throw new IllegalArgumentException("Currenty DSpace supports DOIs for Items only."); + throw new IllegalArgumentException("Currently DSpace supports DOIs for Items only."); } try { - provider.reserveOnline(context, dso, - DOI.SCHEME + doiRow.getDoi()); + provider.reserveOnline(context, dso, DOI.SCHEME + doiRow.getDoi(), filter); if (!quiet) { - System.out.println("This 
identifier : " - + DOI.SCHEME + doiRow.getDoi() - + " is successfully reserved."); + System.out.println("This identifier : " + DOI.SCHEME + doiRow.getDoi() + " is successfully reserved."); } } catch (IdentifierException ex) { if (!(ex instanceof DOIIdentifierException)) { @@ -477,16 +534,14 @@ public void reserve(DOI doiRow) throws SQLException { .codeToString(doiIdentifierException.getCode()), ex); if (!quiet) { - System.err.println("It wasn't possible to reserve this identifier: " - + DOI.SCHEME + doiRow.getDoi()); + System.err.println("It wasn't possible to reserve this identifier: " + DOI.SCHEME + doiRow.getDoi()); } } catch (IllegalArgumentException ex) { LOG.error("Database table DOI contains a DOI that is not valid: " + DOI.SCHEME + doiRow.getDoi() + "!", ex); if (!quiet) { - System.err.println("It wasn't possible to reserve this identifier: " - + DOI.SCHEME + doiRow.getDoi()); + System.err.println("It wasn't possible to reserve this identifier: " + DOI.SCHEME + doiRow.getDoi()); } throw new IllegalStateException("Database table DOI contains a DOI " + " that is not valid: " @@ -495,19 +550,21 @@ public void reserve(DOI doiRow) throws SQLException { LOG.error("Error while trying to get data from database", ex); if (!quiet) { - System.err.println("It wasn't possible to reserve this identifier: " - + DOI.SCHEME + doiRow.getDoi()); + System.err.println("It wasn't possible to reserve this identifier: " + DOI.SCHEME + doiRow.getDoi()); } throw new RuntimeException("Error while trying to get data from database", ex); } } + /** + * Update metadata for a DOI + * @param doiRow - DOI to update + */ public void update(DOI doiRow) { DSpaceObject dso = doiRow.getDSpaceObject(); if (Constants.ITEM != dso.getType()) { - throw new IllegalArgumentException("Currenty DSpace supports DOIs " - + "for Items only."); + throw new IllegalArgumentException("Currently DSpace supports DOIs for Items only."); } try { @@ -541,8 +598,7 @@ public void update(DOI doiRow) { 
.codeToString(doiIdentifierException.getCode()), ex); if (!quiet) { - System.err.println("It wasn't possible to update this identifier: " - + DOI.SCHEME + doiRow.getDoi()); + System.err.println("It wasn't possible to update this identifier: " + DOI.SCHEME + doiRow.getDoi()); } } catch (IllegalArgumentException ex) { @@ -550,8 +606,7 @@ public void update(DOI doiRow) { + DOI.SCHEME + doiRow.getDoi() + "!", ex); if (!quiet) { - System.err.println("It wasn't possible to update this identifier: " - + DOI.SCHEME + doiRow.getDoi()); + System.err.println("It wasn't possible to update this identifier: " + DOI.SCHEME + doiRow.getDoi()); } throw new IllegalStateException("Database table DOI contains a DOI " @@ -562,8 +617,12 @@ public void update(DOI doiRow) { } } - public void delete(String identifier) - throws SQLException { + /** + * Delete a DOI + * @param identifier - DOI to delete + * @throws SQLException + */ + public void delete(String identifier) throws SQLException { String doi = null; DOI doiRow = null; @@ -575,8 +634,7 @@ public void delete(String identifier) doi.substring(DOI.SCHEME.length())); if (null == doiRow) { - throw new IllegalStateException("You specified a valid DOI," - + " that is not stored in our database."); + throw new IllegalStateException("You specified a valid DOI, that is not stored in our database."); } provider.deleteOnline(context, doi); @@ -642,15 +700,14 @@ public DOI resolveToDOI(String identifier) //Check if this Item has an Identifier, mint one if it doesn't if (null == doiRow) { - doi = provider.mint(context, dso); + doi = provider.mint(context, dso, this.filter); doiRow = doiService.findByDoi(context, doi.substring(DOI.SCHEME.length())); return doiRow; } return doiRow; } else { - throw new IllegalStateException("You specified an ItemID, " - + "that is not stored in our database."); + throw new IllegalStateException("You specified an ItemID, that is not stored in our database."); } } @@ -667,7 +724,7 @@ public DOI resolveToDOI(String 
identifier) doiRow = doiService.findDOIByDSpaceObject(context, dso); if (null == doiRow) { - doi = provider.mint(context, dso); + doi = provider.mint(context, dso, this.filter); doiRow = doiService.findByDoi(context, doi.substring(DOI.SCHEME.length())); } @@ -680,8 +737,7 @@ public DOI resolveToDOI(String identifier) doiRow = doiService.findByDoi(context, doi.substring(DOI.SCHEME.length())); if (null == doiRow) { - throw new IllegalStateException("You specified a valid DOI," - + " that is not stored in our database."); + throw new IllegalStateException("You specified a valid DOI, that is not stored in our database."); } } catch (DOIIdentifierException ex) { // Identifier was not recognized as DOI. @@ -699,6 +755,14 @@ public DOI resolveToDOI(String identifier) return doiRow; } + /** + * Send an alert email to the configured recipient when DOI operations encounter an error + * @param action - action being attempted (eg. reserve, register, update) + * @param dso - DSpaceObject associated with the DOI + * @param doi - DOI for this operation + * @param reason - failure reason or error message + * @throws IOException + */ private void sendAlertMail(String action, DSpaceObject dso, String doi, String reason) throws IOException { String recipient = configurationService.getProperty("alert.recipient"); @@ -728,8 +792,11 @@ private void sendAlertMail(String action, DSpaceObject dso, String doi, String r } } + /** + * Set this runner to be in quiet mode, suppressing console output + */ private void setQuiet() { this.quiet = true; } -} +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java index bc8ea90957e5..57136d6143bb 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java +++ b/dspace-api/src/main/java/org/dspace/identifier/doi/DataCiteConnector.java @@ -45,13 +45,13 @@ import 
org.dspace.handle.service.HandleService; import org.dspace.identifier.DOI; import org.dspace.services.ConfigurationService; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.filter.ElementFilter; -import org.jdom.input.SAXBuilder; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.ElementFilter; +import org.jdom2.input.SAXBuilder; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; diff --git a/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java b/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java index 9af1fd8a0a41..5bd68a90615f 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java +++ b/dspace-api/src/main/java/org/dspace/identifier/service/DOIService.java @@ -17,26 +17,65 @@ import org.dspace.identifier.doi.DOIIdentifierException; /** - * Service interface class for the DOI object. - * The implementation of this class is responsible for all business logic calls for the DOI object and is autowired - * by spring + * Service interface class for the {@link DOI} object. + * The implementation of this class is responsible for all business logic calls + * for the {@link DOI} object and is autowired by Spring. * * @author kevinvandevelde at atmire.com */ public interface DOIService { + /** + * Update a DOI in storage. + * + * @param context current DSpace session. + * @param doi the DOI to persist. + * @throws SQLException passed through. + */ public void update(Context context, DOI doi) throws SQLException; + /** + * Create a new DOI in storage. + * + * @param context current DSpace session. + * @return the new DOI. + * @throws SQLException passed through. 
+ */ public DOI create(Context context) throws SQLException; + /** + * Find a specific DOI in storage. + * + * @param context current DSpace session. + * @param doi string representation of the DOI. + * @return the DOI object found. + * @throws SQLException passed through, can mean none found. + */ public DOI findByDoi(Context context, String doi) throws SQLException; + /** + * Find the DOI assigned to a given DSpace Object. + * + * @param context current DSpace session. + * @param dso The DSpace Object. + * @return the DSO's DOI. + * @throws SQLException passed through. + */ public DOI findDOIByDSpaceObject(Context context, DSpaceObject dso) throws SQLException; + /** + * Find the DOI assigned to a given DSpace Object, unless it has one of a + * given set of statuses. + * + * @param context current DSpace context. + * @param dso the DSpace Object. + * @param statusToExclude uninteresting statuses. + * @return the DSO's DOI. + * @throws SQLException passed through. + */ public DOI findDOIByDSpaceObject(Context context, DSpaceObject dso, List statusToExclude) throws SQLException; - /** * This method helps to convert a DOI into a URL. It takes DOIs in one of * the following formats and returns it as URL (f.e. @@ -49,12 +88,18 @@ public DOI findDOIByDSpaceObject(Context context, DSpaceObject dso, List getDOIsByStatus(Context context, List statuses) throws SQLException; /** - * Find all DOIs that are similar to the specified pattern ant not in the specified states. + * Find all DOIs that are similar to the specified pattern and not in the + * specified states. * * @param context DSpace context * @param doiPattern The pattern, e.g. "10.5072/123.%" @@ -85,4 +138,11 @@ public String formatIdentifier(String identifier) public List getSimilarDOIsNotInState(Context context, String doiPattern, List statuses, boolean dsoIsNotNull) throws SQLException; + + /** + * Get the URL stem of the DOI resolver, e.g. "https://doi.org/". + * + * @return URL to the DOI resolver. 
+ */ + public String getResolver(); } diff --git a/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java b/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java index 74219fc71c75..23005b657508 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java +++ b/dspace-api/src/main/java/org/dspace/identifier/service/IdentifierService.java @@ -9,9 +9,11 @@ import java.sql.SQLException; import java.util.List; +import java.util.Map; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; +import org.dspace.content.logic.Filter; import org.dspace.core.Context; import org.dspace.identifier.Identifier; import org.dspace.identifier.IdentifierException; @@ -92,6 +94,9 @@ void reserve(Context context, DSpaceObject dso, String identifier) throws AuthorizeException, SQLException, IdentifierException; /** + * Used to register newly-minted identifiers. Each provider is responsible + * for creating the appropriate identifier. All providers are interrogated. + * * @param context The relevant DSpace Context. * @param dso DSpace object to be registered * @throws AuthorizeException if authorization error @@ -101,7 +106,53 @@ void reserve(Context context, DSpaceObject dso, String identifier) void register(Context context, DSpaceObject dso) throws AuthorizeException, SQLException, IdentifierException; /** - * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6) + * + * Register identifiers for a DSO, with a map of logical filters for each Identifier class to apply + * at the time of local registration. + * + * @param context The relevant DSpace Context. 
+ * @param dso DSpace object to be registered + * @param typeFilters If a service supports a given Identifier implementation, apply the associated filter + * @throws AuthorizeException if authorization error + * @throws SQLException if database error + * @throws IdentifierException if identifier error + */ + void register(Context context, DSpaceObject dso, Map, Filter> typeFilters) + throws AuthorizeException, SQLException, IdentifierException; + + /** + * + * Register identifier(s) for the given DSO just with providers that support that Identifier class, and + * apply the given filter if that provider extends FilteredIdentifierProvider + * + * @param context The relevant DSpace Context. + * @param dso DSpace object to be registered + * @param type Type of identifier to register + * @param filter If a service supports a given Identifier implementation, apply this specific filter + * @throws AuthorizeException if authorization error + * @throws SQLException if database error + * @throws IdentifierException if identifier error + */ + void register(Context context, DSpaceObject dso, Class type, Filter filter) + throws AuthorizeException, SQLException, IdentifierException; + + /** + * + * Register identifier(s) for the given DSO just with providers that support that Identifier class, and + * apply the given filter if that provider extends FilteredIdentifierProvider + * + * @param context The relevant DSpace Context. + * @param dso DSpace object to be registered + * @param type Type of identifier to register + * @throws AuthorizeException if authorization error + * @throws SQLException if database error + * @throws IdentifierException if identifier error + */ + void register(Context context, DSpaceObject dso, Class type) + throws AuthorizeException, SQLException, IdentifierException; + + /** + * Used to Register a specific Identifier (for example a Handle, hdl:1234.5/6). * The provider is responsible for detecting and processing the appropriate * identifier. 
All Providers are interrogated. Multiple providers * can process the same identifier. diff --git a/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryService.java b/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryService.java new file mode 100644 index 000000000000..4f4543ac7805 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryService.java @@ -0,0 +1,24 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif; + +import org.dspace.content.Bitstream; + +/** + * @author Michael Spalti mspalti@willamette.edu + */ +public interface IIIFApiQueryService { + + /** + * Returns array with canvas height and width + * @param bitstream + * @return + */ + int[] getImageDimensions(Bitstream bitstream); + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java b/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java new file mode 100644 index 000000000000..7c6336ed3c7f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/IIIFApiQueryServiceImpl.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif; + +import static org.dspace.iiif.canvasdimension.Util.checkDimensions; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; +import java.net.URL; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Bitstream; +import org.dspace.iiif.util.IIIFSharedUtils; + + +/** + * Queries the configured IIIF image 
server via the Image API. + * + * @author Michael Spalti mspalti@willamette.edu + */ +public class IIIFApiQueryServiceImpl implements IIIFApiQueryService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(IIIFApiQueryServiceImpl.class); + + @Override + public int[] getImageDimensions(Bitstream bitstream) { + int[] arr = new int[2]; + String path = IIIFSharedUtils.getInfoJsonPath(bitstream); + URL url; + BufferedReader in = null; + try { + url = new URL(path); + HttpURLConnection con = (HttpURLConnection) url.openConnection(); + con.setRequestMethod("GET"); + in = new BufferedReader( + new InputStreamReader(con.getInputStream())); + String inputLine; + StringBuilder response = new StringBuilder(); + while ((inputLine = in.readLine()) != null) { + response.append(inputLine); + } + JsonNode parent = new ObjectMapper().readTree(response.toString()); + // return dimensions if found. + if (parent.has("width") && parent.has("height")) { + arr[0] = parent.get("width").asInt(); + arr[1] = parent.get("height").asInt(); + return checkDimensions(arr); + } + } catch (IOException e) { + log.error(e.getMessage(), e); + } finally { + if (in != null) { + try { + in.close(); + } catch (IOException e) { + log.error(e.getMessage(), e); + } + } + } + return null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java new file mode 100644 index 000000000000..c7feea4c56ee --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/CanvasDimensionCLI.java @@ -0,0 +1,237 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension; + +import java.util.Arrays; +import java.util.Date; +import java.util.UUID; 
+ +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.MissingArgumentException; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.dspace.app.util.factory.UtilServiceFactory; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.iiif.canvasdimension.factory.IIIFCanvasDimensionServiceFactory; +import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Sets IIIF canvas metadata on bitstreams based on image size. 
+ * + * @author Michael Spalti mspalti@willamette.edu + */ +public class CanvasDimensionCLI { + + private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService(); + private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + + private CanvasDimensionCLI() {} + + public static void main(String[] argv) throws Exception { + + Date startTime = new Date(); + + boolean iiifEnabled = configurationService.getBooleanProperty("iiif.enabled"); + if (!iiifEnabled) { + System.out.println("WARNING: IIIF is not enabled on this DSpace server."); + } + + // default to not updating existing dimensions + boolean force = false; + // default to printing messages + boolean isQuiet = false; + // default to no limit + int max2Process = Integer.MAX_VALUE; + + String identifier = null; + String eperson = null; + + Context context = new Context(Context.Mode.BATCH_EDIT); + + IIIFCanvasDimensionService canvasProcessor = IIIFCanvasDimensionServiceFactory.getInstance() + .getIiifCanvasDimensionService(); + + CommandLineParser parser = new DefaultParser(); + + Options options = new Options(); + options.addOption("i", "identifier", true, + "process IIIF canvas dimensions for images belonging to this identifier"); + options.addOption("e", "eperson", true, + "email of eperson setting the canvas dimensions"); + options.addOption("f", "force", false, + "force update of all IIIF canvas height and width dimensions"); + options.addOption("q", "quiet", false, + "do not print anything except in the event of errors"); + options.addOption("m", "maximum", true, + "process no more than maximum items"); + options.addOption("h", "help", false, + "display help"); + + Option skipOption = Option.builder("s") + .longOpt("skip") + .hasArg() + .hasArgs() + .valueSeparator(',') + .desc( + "SKIP the bitstreams belonging to identifier\n" + + "Separate multiple identifiers with a comma (,)\n" + + "(e.g. 
-s \n 123456789/34,123456789/323)") + .build(); + options.addOption(skipOption); + + CommandLine line = null; + + try { + line = parser.parse(options, argv); + } catch (MissingArgumentException e) { + System.out.println("ERROR: " + e.getMessage()); + HelpFormatter help = new HelpFormatter(); + help.printHelp("CanvasDimension processor\n", options); + System.exit(1); + } + + if (line.hasOption('h')) { + HelpFormatter help = new HelpFormatter(); + help.printHelp("CanvasDimension processor\n", options); + System.out + .println("\nUUID example: iiif-canvas-dimensions -e user@email.org " + + "-i 1086306d-8a51-43c3-98b9-c3b00f49105f"); + System.out + .println("\nHandle example: iiif-canvas-dimensions -e user@email.org " + + "-i 123456789/12"); + System.exit(0); + } + + if (line.hasOption('f')) { + force = true; + } + if (line.hasOption('q')) { + isQuiet = true; + } + if (line.hasOption('e')) { + eperson = line.getOptionValue('e'); + } + if (line.hasOption('i')) { + identifier = line.getOptionValue('i'); + } else { + HelpFormatter help = new HelpFormatter(); + help.printHelp("CanvasDimension processor\n", options); + System.out.println("An identifier for a Community, Collection, or Item must be provided."); + System.exit(1); + } + if (line.hasOption('m')) { + max2Process = Integer.parseInt(line.getOptionValue('m')); + if (max2Process <= 1) { + System.out.println("Invalid maximum value '" + + line.getOptionValue('m') + "' - ignoring"); + max2Process = Integer.MAX_VALUE; + } + } + String[] skipIds; + + if (line.hasOption('s')) { + //specified which identifiers to skip when processing + skipIds = line.getOptionValues('s'); + + if (skipIds == null || skipIds.length == 0) { //display error, since no identifiers specified to skip + System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" + + "Make sure to separate multiple identifiers with a comma!\n" + + "(e.g. 
-s 123456789/34,123456789/323)\n"); + HelpFormatter myhelp = new HelpFormatter(); + myhelp.printHelp("Canvas Dimensions\n", options); + System.exit(1); + } + canvasProcessor.setSkipList(Arrays.asList(skipIds)); + } + + DSpaceObject dso = null; + if (identifier.indexOf('/') != -1) { + dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier); + } else { + dso = UtilServiceFactory.getInstance().getDSpaceObjectUtils() + .findDSpaceObject(context, UUID.fromString(identifier)); + } + + if (dso == null) { + throw new IllegalArgumentException("Cannot resolve " + + identifier + " to a DSpace object."); + } + + EPerson user; + + if (eperson == null) { + System.out.println("You must provide an eperson using the \"-e\" flag."); + System.exit(1); + } + + if (eperson.indexOf('@') != -1) { + // @ sign, must be an email + user = epersonService.findByEmail(context, eperson); + } else { + user = epersonService.find(context, UUID.fromString(eperson)); + } + + if (user == null) { + System.out.println("Error, eperson cannot be found: " + eperson); + System.exit(1); + } + + context.setCurrentUser(user); + + canvasProcessor.setForceProcessing(force); + canvasProcessor.setMax2Process(max2Process); + canvasProcessor.setIsQuiet(isQuiet); + + int processed = 0; + switch (dso.getType()) { + case Constants.COMMUNITY: + processed = canvasProcessor.processCommunity(context, (Community) dso); + break; + case Constants.COLLECTION: + processed = canvasProcessor.processCollection(context, (Collection) dso); + break; + case Constants.ITEM: + canvasProcessor.processItem(context, (Item) dso); + processed = 1; + break; + default: + System.out.println("Unsupported object type."); + break; + } + // commit changes + if (processed >= 1) { + context.commit(); + } + + Date endTime = new Date(); + System.out.println("Started: " + startTime.getTime()); + System.out.println("Ended: " + endTime.getTime()); + System.out.println( + "Elapsed time: " + ((endTime.getTime() - 
startTime.getTime()) / 1000) + " secs (" + (endTime + .getTime() - startTime.getTime()) + " msecs)"); + + // Always print summary to standard out. + System.out.println(processed + " IIIF items were processed."); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java new file mode 100644 index 000000000000..a8be8971c04d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/IIIFCanvasDimensionServiceImpl.java @@ -0,0 +1,244 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension; + +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_HEIGHT_QUALIFIER; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_IMAGE_ELEMENT; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_SCHEMA; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_WIDTH_QUALIFIER; + +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.iiif.IIIFApiQueryService; +import 
org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService; +import org.dspace.iiif.util.IIIFSharedUtils; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * This service sets canvas dimensions for bitstreams. Processes communities, + * collections, and individual items. + * + * @author Michael Spalti mspalti@willamette.edu + */ +public class IIIFCanvasDimensionServiceImpl implements IIIFCanvasDimensionService { + + @Autowired() + ItemService itemService; + @Autowired() + CommunityService communityService; + @Autowired() + BitstreamService bitstreamService; + @Autowired() + DSpaceObjectService dSpaceObjectService; + @Autowired() + IIIFApiQueryService iiifApiQuery; + + private boolean forceProcessing = false; + private boolean isQuiet = false; + private List skipList = null; + private int max2Process = Integer.MAX_VALUE; + private int processed = 0; + + // used to check for existing canvas dimension + private static final String IIIF_WIDTH_METADATA = METADATA_IIIF_SCHEMA + "." + METADATA_IIIF_IMAGE_ELEMENT + + "." 
+ METADATA_IIIF_WIDTH_QUALIFIER; + + @Override + public void setForceProcessing(boolean force) { + forceProcessing = force; + } + + @Override + public void setIsQuiet(boolean quiet) { + isQuiet = quiet; + } + + @Override + public void setMax2Process(int max2Process) { + this.max2Process = max2Process; + } + + @Override + public void setSkipList(List skipList) { + this.skipList = skipList; + } + + @Override + public int processCommunity(Context context, Community community) throws Exception { + if (!inSkipList(community.getHandle())) { + List subcommunities = community.getSubcommunities(); + for (Community subcommunity : subcommunities) { + processCommunity(context, subcommunity); + } + List collections = community.getCollections(); + for (Collection collection : collections) { + processCollection(context, collection); + } + } + return processed; + } + + @Override + public int processCollection(Context context, Collection collection) throws Exception { + if (!inSkipList(collection.getHandle())) { + Iterator itemIterator = itemService.findAllByCollection(context, collection); + while (itemIterator.hasNext() && processed < max2Process) { + processItem(context, itemIterator.next()); + } + } + return processed; + } + + @Override + public void processItem(Context context, Item item) throws Exception { + if (!inSkipList(item.getHandle())) { + boolean isIIIFItem = IIIFSharedUtils.isIIIFItem(item); + if (isIIIFItem) { + if (processItemBundles(context, item)) { + ++processed; + } + context.uncacheEntity(item); + } + } + } + + /** + * Process all IIIF bundles for an item. 
+ * @param context + * @param item + * @return + * @throws Exception + */ + private boolean processItemBundles(Context context, Item item) throws Exception { + List bundles = IIIFSharedUtils.getIIIFBundles(item); + boolean done = false; + for (Bundle bundle : bundles) { + List bitstreams = bundle.getBitstreams(); + for (Bitstream bit : bitstreams) { + done |= processBitstream(context, bit); + context.uncacheEntity(bit); + } + } + if (done) { + if (!isQuiet) { + System.out.println("Updated canvas metadata for item: " + item.getID()); + } + } + return done; + + } + + /** + * Gets image height and width for the bitstream. For jp2 images, height and width are + * obtained from the IIIF image server. For other formats supported by ImageIO these values + * are read from the actual DSpace bitstream content. If bitstream width metadata already exists, + * the bitstream is processed when forceProcessing is true. + * @param context + * @param bitstream + * @return + * @throws Exception + */ + private boolean processBitstream(Context context, Bitstream bitstream) throws SQLException, AuthorizeException, + IOException { + + boolean processed = false; + boolean isImage = bitstream.getFormat(context).getMIMEType().contains("image/"); + if (isImage) { + Optional op = bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.') + .contentEquals(IIIF_WIDTH_METADATA)).findFirst(); + if (op.isEmpty() || forceProcessing) { + if (forceProcessing && !isQuiet) { + System.out.println("Force processing for bitstream: " + bitstream.getID()); + } + int[] dims; + InputStream stream = null; + try { + stream = bitstreamService.retrieve(context, bitstream); + try { + dims = ImageDimensionReader.getImageDimensions(stream); + if (dims == null) { + // If image dimensions are not available try the iiif image server. + dims = iiifApiQuery.getImageDimensions(bitstream); + } + } catch (IOException e) { + // If an exception was raised by ImageIO, try the iiif image server. 
+ dims = iiifApiQuery.getImageDimensions(bitstream); + } + } finally { + if (stream != null) { + stream.close(); + } + } + + if (dims != null) { + processed = setBitstreamMetadata(context, bitstream, dims); + // update the bitstream + bitstreamService.update(context, bitstream); + } + } + } + return processed; + } + + /** + * Sets bitstream metadata for "iiif.image.width" and "iiif.image.height". + * @param context + * @param bitstream + * @param dims + * @return + */ + private boolean setBitstreamMetadata(Context context, Bitstream bitstream, int[] dims) throws SQLException { + dSpaceObjectService.clearMetadata(context, bitstream, METADATA_IIIF_SCHEMA, + METADATA_IIIF_IMAGE_ELEMENT, METADATA_IIIF_WIDTH_QUALIFIER, Item.ANY); + dSpaceObjectService.setMetadataSingleValue(context, bitstream, METADATA_IIIF_SCHEMA, + METADATA_IIIF_IMAGE_ELEMENT, METADATA_IIIF_WIDTH_QUALIFIER, null, String.valueOf(dims[0])); + dSpaceObjectService.clearMetadata(context, bitstream, METADATA_IIIF_SCHEMA, + METADATA_IIIF_IMAGE_ELEMENT, METADATA_IIIF_HEIGHT_QUALIFIER, Item.ANY); + dSpaceObjectService.setMetadataSingleValue(context, bitstream, METADATA_IIIF_SCHEMA, + METADATA_IIIF_IMAGE_ELEMENT, METADATA_IIIF_HEIGHT_QUALIFIER, null, String.valueOf(dims[1])); + if (!isQuiet) { + System.out.println("Added IIIF canvas metadata to bitstream: " + bitstream.getID()); + } + return true; + } + + /** + * Tests whether the identifier is in the skip list. 
+ * @param identifier + * @return + */ + private boolean inSkipList(String identifier) { + if (skipList != null && skipList.contains(identifier)) { + if (!isQuiet) { + System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier); + } + return true; + } else { + return false; + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/ImageDimensionReader.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/ImageDimensionReader.java new file mode 100644 index 000000000000..4e46c8b43f4d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/ImageDimensionReader.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension; + +import static org.dspace.iiif.canvasdimension.Util.checkDimensions; + +import java.awt.image.BufferedImage; +import java.io.IOException; +import java.io.InputStream; +import javax.imageio.ImageIO; + +/** + * Reads and return height and width dimensions for image bitstreams. + * + * @author Michael Spalti mspalti@willamette.edu + */ +public class ImageDimensionReader { + + private ImageDimensionReader() {} + + /** + * Uses ImageIO to read height and width dimensions. + * @param image inputstream for dspace image + * @return image dimensions or null if the image format cannot be read. 
+ * @throws Exception + */ + public static int[] getImageDimensions(InputStream image) throws IOException { + int[] dims = new int[2]; + BufferedImage buf = ImageIO.read(image); + if (buf != null) { + int width = buf.getWidth(null); + int height = buf.getHeight(null); + if (width > 0 && height > 0) { + dims[0] = width; + dims[1] = height; + return checkDimensions(dims); + } + } + return null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/Util.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/Util.java new file mode 100644 index 000000000000..630febfcbf51 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/Util.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension; + +/** + * Utilities for IIIF canvas dimension processing. + * + * @author Michael Spalti mspalti@willamette.edu + */ +public class Util { + + private Util() {} + + /** + * IIIF Presentation API version 2.1.1: + * If the largest image’s dimensions are less than 1200 pixels on either edge, then + * the canvas’s dimensions SHOULD be double those of the image. 
+ * @param dims + * @return + */ + public static int[] checkDimensions(int[] dims) { + if (dims[0] < 1200 || dims[1] < 1200) { + dims[0] = dims[0] * 2; + dims[1] = dims[1] * 2; + } + return dims; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/factory/IIIFCanvasDimensionServiceFactory.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/factory/IIIFCanvasDimensionServiceFactory.java new file mode 100644 index 000000000000..56e5cfee95b6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/factory/IIIFCanvasDimensionServiceFactory.java @@ -0,0 +1,27 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension.factory; + +import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Factory for the image dimension service. 
+ * + * @author Michael Spalti mspalti@willamette.edu + */ +public abstract class IIIFCanvasDimensionServiceFactory { + + public static IIIFCanvasDimensionServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("iiifCanvasDimensionServiceFactory", + IIIFCanvasDimensionServiceFactory.class); + } + + public abstract IIIFCanvasDimensionService getIiifCanvasDimensionService(); +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/factory/IIIFCanvasDimensionServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/factory/IIIFCanvasDimensionServiceFactoryImpl.java new file mode 100644 index 000000000000..0ab17a29a401 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/factory/IIIFCanvasDimensionServiceFactoryImpl.java @@ -0,0 +1,27 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension.factory; + +import org.dspace.iiif.canvasdimension.service.IIIFCanvasDimensionService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory for the image dimension service. 
+ * + * @author Michael Spalti mspalti@willamette.edu + */ +public class IIIFCanvasDimensionServiceFactoryImpl extends IIIFCanvasDimensionServiceFactory { + + @Autowired() + private IIIFCanvasDimensionService iiifCanvasDimensionService; + + @Override + public IIIFCanvasDimensionService getIiifCanvasDimensionService() { + return iiifCanvasDimensionService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/service/IIIFCanvasDimensionService.java b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/service/IIIFCanvasDimensionService.java new file mode 100644 index 000000000000..7e49e4ada79a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/canvasdimension/service/IIIFCanvasDimensionService.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension.service; + +import java.util.List; + +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Context; + +public interface IIIFCanvasDimensionService { + + /** + * Set IIIF canvas dimensions on all IIIF items in a community and its + * sub-communities. + * @param context + * @param community + * @throws Exception + */ + int processCommunity(Context context, Community community) throws Exception; + + /** + * Set IIIF canvas dimensions on all IIIF items in a collection. + * @param context + * @param collection + * @throws Exception + */ + int processCollection(Context context, Collection collection) throws Exception; + + /** + * Set IIIF canvas dimensions for an item. + * @param context + * @param item + * @throws Exception + */ + void processItem(Context context, Item item) throws Exception; + + /** + * Set the force processing property. 
If true, existing canvas + * metadata will be replaced. + * @param force + */ + void setForceProcessing(boolean force); + + /** + * Set whether to output messages during processing. + * @param quiet + */ + void setIsQuiet(boolean quiet); + + /** + * Set the maximum number of items to process. + * @param max2Process + */ + void setMax2Process(int max2Process); + + /** + * Set dso identifiers to skip. + * @param skipList + */ + void setSkipList(List skipList); + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/consumer/CacheEvictBeanLocator.java b/dspace-api/src/main/java/org/dspace/iiif/consumer/CacheEvictBeanLocator.java new file mode 100644 index 000000000000..43e6adc20fb7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/consumer/CacheEvictBeanLocator.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.consumer; + +import org.springframework.beans.BeansException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.stereotype.Component; + +/** + * Exposes the Spring application's IIIF cache evict service to the DSpace event consumer. 
+ */ +@Component +public class CacheEvictBeanLocator implements ApplicationContextAware { + + private static ApplicationContext context; + + private static final String MANIFESTS_CACHE_EVICT_SERVICE = "manifestsCacheEvictService"; + private static final String CANVAS_DIMENSIONS_EVICT_SERVICE = "canvasCacheEvictService"; + + @Override + public void setApplicationContext(ApplicationContext appContext) + throws BeansException { + context = appContext; + } + + public static ApplicationContext getApplicationContext() { + return context; + } + + public static ManifestsCacheEvictService getManifestsCacheEvictService() { + if (context != null) { + return (ManifestsCacheEvictService) context.getBean(MANIFESTS_CACHE_EVICT_SERVICE); + } + return null; + } + + public static CanvasCacheEvictService getCanvasCacheEvictService() { + if (context != null) { + return (CanvasCacheEvictService) context.getBean(CANVAS_DIMENSIONS_EVICT_SERVICE); + } + return null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java b/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java new file mode 100644 index 000000000000..beeb40ceacaa --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/consumer/CanvasCacheEvictService.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.consumer; + +import java.util.Objects; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.CacheManager; +import org.springframework.stereotype.Component; + +@Component +public class CanvasCacheEvictService { + + // The cache that is managed by this service. 
+ static final String CACHE_NAME = "canvasdimensions"; + + @Autowired + CacheManager cacheManager; + + public void evictSingleCacheValue(String cacheKey) { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/consumer/IIIFCacheEventConsumer.java b/dspace-api/src/main/java/org/dspace/iiif/consumer/IIIFCacheEventConsumer.java new file mode 100644 index 000000000000..1d6a6783018c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/consumer/IIIFCacheEventConsumer.java @@ -0,0 +1,166 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.consumer; + +import java.util.HashSet; +import java.util.Set; +import java.util.UUID; + +import org.apache.logging.log4j.Logger; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.event.Consumer; +import org.dspace.event.Event; + + +/** + * This consumer is used to evict modified items from the manifests cache. + */ +public class IIIFCacheEventConsumer implements Consumer { + + private final static Logger log = org.apache.logging.log4j.LogManager.getLogger(IIIFCacheEventConsumer.class); + + // When true all entries will be cleared from cache. + private boolean clearAll = false; + + // Collects modified items for individual removal from cache. + private final Set toEvictFromManifestCache = new HashSet<>(); + + // Collects modified bitstreams for individual removal from canvas dimension cache. 
+ private final Set toEvictFromCanvasCache = new HashSet<>(); + + @Override + public void initialize() throws Exception { + } + + @Override + public void consume(Context ctx, Event event) throws Exception { + int st = event.getSubjectType(); + if (!(st == Constants.BUNDLE || st == Constants.ITEM || st == Constants.BITSTREAM)) { + return; + } + // This subject may become a reference to the parent Item that will be evicted from + // the manifests cache. + DSpaceObject subject = event.getSubject(ctx); + DSpaceObject unmodifiedSubject = event.getSubject(ctx); + + int et = event.getEventType(); + + if (et == Event.DELETE || et == Event.REMOVE) { + log.warn("IIIF event consumer cannot remove a single item from the cache when " + + "a bundle is deleted. The entire cache will be cleared."); + clearAll = true; + } + + if (st == Constants.BUNDLE) { + if ((et == Event.ADD || et == Event.MODIFY || et == Event.MODIFY_METADATA || et == Event.REMOVE + || et == Event.DELETE) && subject != null) { + // set subject to be the parent Item. + subject = ((Bundle) subject).getItems().get(0); + if (log.isDebugEnabled()) { + log.debug("Transforming Bundle event into Item event for " + + subject.getID()); + } + } else { + return; + } + } + + if (st == Constants.BITSTREAM) { + if (et == Event.DELETE || et == Event.REMOVE) { + log.warn("IIIF event consumer cannot remove a single item from the cache when " + + "a bitstream is deleted. The entire cache will be cleared."); + clearAll = true; + } + + if ((et == Event.ADD || et == Event.MODIFY_METADATA ) && subject != null + && ((Bitstream) subject).getBundles().size() > 0) { + // set subject to be the parent Item. + Bundle bundle = ((Bitstream) subject).getBundles().get(0); + subject = bundle.getItems().get(0); + if (log.isDebugEnabled()) { + log.debug("Transforming Bitstream event into Item event for " + + subject.getID()); + } + } else { + return; + } + } + + if (st == Constants.ITEM && et == Event.ADD) { + // nothing to evict from cache. 
+ return; + } + + switch (et) { + case Event.ADD: + addToCacheEviction(subject, unmodifiedSubject, st); + break; + case Event.MODIFY: + addToCacheEviction(subject, unmodifiedSubject, st); + break; + case Event.MODIFY_METADATA: + addToCacheEviction(subject, unmodifiedSubject, st); + break; + case Event.REMOVE: + addToCacheEviction(subject, unmodifiedSubject, st); + break; + case Event.DELETE: + addToCacheEviction(subject, unmodifiedSubject, st); + break; + default: { + log.warn("ManifestsCacheEventConsumer should not have been given this kind of " + + "subject in an event, skipping: " + event); + } + } + } + + private void addToCacheEviction(DSpaceObject subject, DSpaceObject subject2, int type) { + if (type == Constants.BITSTREAM) { + toEvictFromCanvasCache.add(subject2); + } + toEvictFromManifestCache.add(subject); + } + + @Override + public void end(Context ctx) throws Exception { + // Get the eviction service beans. + ManifestsCacheEvictService manifestsCacheEvictService = CacheEvictBeanLocator.getManifestsCacheEvictService(); + CanvasCacheEvictService canvasCacheEvictService = CacheEvictBeanLocator.getCanvasCacheEvictService(); + + if (manifestsCacheEvictService != null) { + if (clearAll) { + manifestsCacheEvictService.evictAllCacheValues(); + } else { + for (DSpaceObject dso : toEvictFromManifestCache) { + UUID uuid = dso.getID(); + manifestsCacheEvictService.evictSingleCacheValue(uuid.toString()); + } + } + } + if (canvasCacheEvictService != null) { + for (DSpaceObject dso : toEvictFromCanvasCache) { + UUID uuid = dso.getID(); + canvasCacheEvictService.evictSingleCacheValue(uuid.toString()); + } + } + + clearAll = false; + toEvictFromManifestCache.clear(); + toEvictFromCanvasCache.clear(); + } + + @Override + public void finish(Context ctx) throws Exception { + + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java b/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java new file mode 
100644 index 000000000000..963ce3113fb7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/consumer/ManifestsCacheEvictService.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.consumer; + +import java.util.Objects; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.CacheManager; +import org.springframework.stereotype.Component; + +/** + * Removes items from the iiif manifests cache. + */ +@Component +public class ManifestsCacheEvictService { + + // The cache that is managed by this service. + static final String CACHE_NAME = "manifests"; + + @Autowired + CacheManager cacheManager; + + public void evictSingleCacheValue(String cacheKey) { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey); + } + + public void evictAllCacheValues() { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java b/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java new file mode 100644 index 000000000000..28d57975bfdd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.iiif.logger; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; + +public class CacheLogger implements CacheEventListener { + private static final Logger log = 
LogManager.getLogger(CacheLogger.class); + + @Override + public void onEvent(CacheEvent cacheEvent) { + log.info("Cache Event Type: {} | Key: {} ", + cacheEvent.getType(), cacheEvent.getKey()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java b/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java new file mode 100644 index 000000000000..2f1a8d6dbabd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java @@ -0,0 +1,24 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.logger; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; + +public class CanvasCacheLogger implements CacheEventListener { + private static final Logger log = LogManager.getLogger(CanvasCacheLogger.class); + + @Override + public void onEvent(CacheEvent cacheEvent) { + log.info("Canvas Dimension Cache Event Type: {} | Key: {} ", + cacheEvent.getType(), cacheEvent.getKey()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/iiif/util/IIIFSharedUtils.java b/dspace-api/src/main/java/org/dspace/iiif/util/IIIFSharedUtils.java new file mode 100644 index 000000000000..67e827511373 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/iiif/util/IIIFSharedUtils.java @@ -0,0 +1,116 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.util; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.StringUtils; +import 
org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.license.CreativeCommonsServiceImpl; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Shared utilities for IIIF processing. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class IIIFSharedUtils { + + // metadata used to enable the iiif features on the item + public static final String METADATA_IIIF_ENABLED = "dspace.iiif.enabled"; + // The DSpace bundle for other content related to item. + protected static final String OTHER_CONTENT_BUNDLE = "OtherContent"; + // The IIIF image server url from configuration + protected static final String IMAGE_SERVER_PATH = "iiif.image.server"; + // IIIF metadata definitions + public static final String METADATA_IIIF_SCHEMA = "iiif"; + public static final String METADATA_IIIF_IMAGE_ELEMENT = "image"; + public static final String METADATA_IIIF_TOC_ELEMENT = "toc"; + public static final String METADATA_IIIF_LABEL_ELEMENT = "label"; + public static final String METADATA_IIIF_HEIGHT_QUALIFIER = "height"; + public static final String METADATA_IIIF_WIDTH_QUALIFIER = "width"; + + protected static final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + + + private IIIFSharedUtils() {} + + public static boolean isIIIFItem(Item item) { + return item.getMetadata().stream().filter(m -> m.getMetadataField().toString('.') + .contentEquals(METADATA_IIIF_ENABLED)) + .anyMatch(m -> m.getValue().equalsIgnoreCase("true") || + m.getValue().equalsIgnoreCase("yes")); + } + + /** + * This method returns the bundles holding IIIF resources if any. + * If there is no IIIF content available an empty bundle list is returned. 
+ * @param item the DSpace item + * + * @return list of DSpace bundles with IIIF content + */ + public static List getIIIFBundles(Item item) { + boolean iiif = isIIIFEnabled(item); + List bundles = new ArrayList<>(); + if (iiif) { + bundles = item.getBundles().stream().filter(IIIFSharedUtils::isIIIFBundle).collect(Collectors.toList()); + } + return bundles; + } + + /** + * This method verify if the IIIF feature is enabled on the item or parent collection. + * + * @param item the dspace item + * @return true if the item supports IIIF + */ + public static boolean isIIIFEnabled(Item item) { + return item.getOwningCollection().getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED)) + .anyMatch(m -> m.getValue().equalsIgnoreCase("true") || + m.getValue().equalsIgnoreCase("yes")) + || item.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED)) + .anyMatch(m -> m.getValue().equalsIgnoreCase("true") || + m.getValue().equalsIgnoreCase("yes")); + } + + /** + * Utility method to check is a bundle can contain bitstreams to use as IIIF + * resources + * + * @param b the DSpace bundle to check + * @return true if the bundle can contain bitstreams to use as IIIF resources + */ + public static boolean isIIIFBundle(Bundle b) { + return !StringUtils.equalsAnyIgnoreCase(b.getName(), Constants.LICENSE_BUNDLE_NAME, + Constants.METADATA_BUNDLE_NAME, CreativeCommonsServiceImpl.CC_BUNDLE_NAME, "THUMBNAIL", + "BRANDED_PREVIEW", "TEXT", OTHER_CONTENT_BUNDLE) + && b.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED)) + .noneMatch(m -> m.getValue().equalsIgnoreCase("false") || m.getValue().equalsIgnoreCase("no")); + } + + /** + * Returns url for retrieving info.json metadata from the image server. 
+ * @param bitstream + * @return + */ + public static String getInfoJsonPath(Bitstream bitstream) { + String iiifImageServer = configurationService.getProperty(IMAGE_SERVER_PATH); + return iiifImageServer + bitstream.getID() + "/info.json"; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/README.md b/dspace-api/src/main/java/org/dspace/importer/external/README.md deleted file mode 100644 index 66f22d9e2759..000000000000 --- a/dspace-api/src/main/java/org/dspace/importer/external/README.md +++ /dev/null @@ -1,159 +0,0 @@ -- [Introduction](#Introduction) - - [Features](#Features) - - [Abstraction of input format](#Abstraction-input-format) - - [Transformation to DSpace item](#transformation) - - [Relation with BTE](#bte) -- [Implementation of an import source](#Example-implementation) - - [Inherited methods](#Inherited-methods) - - [Metadata mapping](#Mapping) - - -# Introduction # - -This documentation explains the features and the usage of the importer framework. -Enabling the framework can be achieved by removing the comment block from the following step in item-submission.xml -Implementation specific or additional configuration can be found in their related documentation, if any. (Some implementations use other submission steps altogether, so make sure to double check) - -``` - - submit.progressbar.lookup - org.dspace.submit.step.XMLUIStartSubmissionLookupStep - org.dspace.app.webui.submit.step.JSPStartSubmissionLookupStep - org.dspace.app.xmlui.aspect.submission.submit.StartSubmissionLookupStep - true - -``` - -## Features ## - -- lookup publications from remote sources -- Support for multiple implementations - -## Abstraction of input format ## - -The importer framework does not enforce a specific input format. Each importer implementation defines which input format it expects from a remote source. -The import framework uses generics to achieve this. 
Each importer implementation will have a type set of the record type it receives from the remote source's response. -This type set will also be used by the framework to use the correct MetadataFieldMapping for a certain implementation. Read [Implementation of an import source](#Example-implementation) for more information. - -## Transformation to DSpace item ## - -The framework produces an 'ImportRecord' that is completely decoupled from DSPace. It contains a set of metadata DTO's that contain the notion of schema,element and qualifier. The specific implementation is responsible for populating this set. It is then very simple to create a DSPace item from this list. - -## Relation with BTE ## - -While there is some overlap between this framework and BTE, this framework supports some features that are hard to implement using the BTE. It has explicit support to deal with network failure and throttling imposed by the data source. It also has explicit support for distinguishing between network caused errors and invalid requests to the source. -Furthermore the framework doesn't impose any restrictions on the format in which the data is retrieved. It uses java generics to support different source record types. A reference implementation of using XML records is provided for which a set of metadata can be generated from any xpath expression (or composite of xpath expressions). -Unless 'advanced' processing is necessary (e.g. lookup of authors in an LDAP directory) this metadata mapping can be simply configured using spring. No code changes necessary. A mixture of advanced and simple (xpath) mapping is also possible. 
- -This design is also in line with the roadmap to create a Modular Framework as detailed in [https://wiki.duraspace.org/display/DSPACE/Design+-+Module+Framework+and+Registry](https://wiki.duraspace.org/display/DSPACE/Design+-+Module+Framework+and+Registry) -This modular design also allows it to be completely independent of the user interface layer, be it JSPUI, XMLUI, command line or the result of the new UI projects: [https://wiki.duraspace.org/display/DSPACE/Design+-+Single+UI+Project](https://wiki.duraspace.org/display/DSPACE/Design+-+Single+UI+Project) - -# Implementation of an import source # - -Each importer implementation must at least implement interface *org.dspace.importer.external.service.components.MetadataSource* and implement the inherited methods. - -One can also choose to implement class *org.dspace.importer.external.service.components.AbstractRemoteMetadataSource* next to the MetadataSource interface. This class contains functionality to handle request timeouts and to retry requests. - -A third option is to implement class *org.dspace.importer.external.service.AbstractImportSourceService*. This class already implements both the MetadataSource interface and Source class. AbstractImportSourceService has a generic type set 'RecordType'. In the importer implementation this type set should be the class of the records received from the remote source's response (e.g. when using axiom to get the records from the remote source's XML response, the importer implementation's type set is *org.apache.axiom.om.OMElement*). - -Implementing the AbstractImportSourceService allows the importer implementation to use the framework's build-in support to transform a record received from the remote source to an object of class *org.dspace.importer.external.datamodel.ImportRecord* containing DSpace metadata fields, as explained here: [Metadata mapping](#Mapping). - -## Inherited methods ## - -Method getImportSource() should return a unique identifier. 
Importer implementations should not be called directly, but class *org.dspace.importer.external.service.ImportService* should be called instead. This class contains the same methods as the importer implementations, but with an extra parameter 'url'. This url parameter should contain the same identifier that is returned by the getImportSource() method of the importer implementation you want to use. - -The other inherited methods are used to query the remote source. - -## Metadata mapping ## - -When using an implementation of AbstractImportSourceService, a mapping of remote record fields to DSpace metadata fields can be created. - -first create an implementation of class AbstractMetadataFieldMapping with the same type set used for the importer implementation. - -Then create a spring configuration file in [dspace.dir]/config/spring/api. - -Each DSpace metadata field that will be used for the mapping must first be configured as a spring bean of class *org.dspace.importer.external.metadatamapping.MetadataFieldConfig*. - -```xml - - - -``` - -Now this metadata field can be used to create a mapping. To add a mapping for the "dc.title" field declared above, a new spring bean configuration of a class class *org.dspace.importer.external.metadatamapping.contributor.MetadataContributor* needs to be added. This interface contains a type argument. -The type needs to match the type used in the implementation of AbstractImportSourceService. The responsibility of each MetadataContributor implementation is to generate a set of metadata from the retrieved document. How it does that is completely opaque to the AbstractImportSourceService but it is assumed that only one entity (i.e. item) is fed to the metadatum contributor. - - -For example ```java SimpleXpathMetadatumContributor implements MetadataContributor``` can parse a fragment of xml and generate one or more metadata values. 
- - -This bean expects 2 property values: - -- field: A reference to the configured spring bean of the DSpace metadata field. e.g. the "dc.title" bean declared above. -- query: The xpath expression used to select the record value returned by the remote source. - -```xml - - - - -``` - -Multiple record fields can also be combined into one value. To implement a combined mapping first create a *SimpleXpathMetadatumContributor* as explained above for each part of the field. - -```xml - - - - - - - - -``` - -Note that namespace prefixes used in the xpath queries are configured in bean "FullprefixMapping" in the same spring file. - -```xml - - Defines the namespace mappin for the SimpleXpathMetadatum contributors - - - -``` - -Then create a new list in the spring configuration containing references to all *SimpleXpathMetadatumContributor* beans that need to be combined. - -```xml - - - - {{/code}} -``` - -Finally create a spring bean configuration of class *org.dspace.importer.external.metadatamapping.contributor.CombinedMetadatumContributor*. This bean expects 3 values: - -- field: A reference to the configured spring bean of the DSpace metadata field. e.g. the "dc.title" bean declared above. -- metadatumContributors: A reference to the list containing all the single record field mappings that need to be combined. -- separator: These characters will be added between each record field value when they are combined into one field. - -```xml - - - - - -``` - -Each contributor must also be added to the "MetadataFieldMap" used by the *MetadataFieldMapping* implementation. Each entry of this map maps a metadata field bean to a contributor. For the contributors created above this results in the following configuration: - -```xml - - - - -``` - -Note that the single field mappings used for the combined author mapping are not added to this list. 
- diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java new file mode 100644 index 000000000000..e7d2d3398b6f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ads; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the ADS metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class ADSFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "adsMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..8fbe4ef2cf57 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java @@ -0,0 +1,334 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ads; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; 
+import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying ADS + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class ADSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String resultFieldList; + + private String apiKey; + private int timeout = 1000; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "ads"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for CrossRef"); + } + + @Override + public void init() throws Exception {} + + public String getApiKey() { + return apiKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + + /** + * This class is a Callable implementation to get ADS entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + return search(query.getParameterAsClass("query", String.class), + query.getParameterAsClass("start", Integer.class), + query.getParameterAsClass("count", Integer.class), + getApiKey()); + } + } + + /** + * This class is a Callable implementation to get an ADS entry using bibcode + * The bibcode to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + String queryString = "bibcode:" + query.getParameterAsClass("id", String.class); + return search(queryString, 0 , 1, getApiKey()); + } + } + + /** + * This class is a Callable implementation to search ADS entries + * using author and title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + Integer year = query.getParameterAsClass("year", Integer.class); + return search(title, author, year, start, count, getApiKey()); + } + + } + + /** + * This class is a Callable implementation to count the number of entries for an ADS query. + * This Callable use as query value to ADS the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + return count(query.getParameterAsClass("query", String.class), getApiKey()); + } + } + + private List search(String title, String author, int year, int start, int count, String token) { + String query = ""; + if (StringUtils.isNotBlank(title)) { + query += "title:" + title; + } + if (StringUtils.isNotBlank(author)) { + String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)"; + String[] authors = author.split(splitRegex); + // [FAU] + if (StringUtils.isNotBlank(query)) { + query = "author:"; + } else { + query += "&fq=author:"; + } + int x = 0; + for (String auth : authors) { + x++; + query += auth; + if (x < authors.length) { + query += " AND "; + } + } + } + if (year != -1) { + // [DP] + if (StringUtils.isNotBlank(query)) { + query = "year:"; + } else { + query += "&fq=year:"; + } + query += year; + } + return search(query.toString(), start, count, token); + } + + public Integer count(String query, String token) { + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + token); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("q", query); + uriBuilder.addParameter("rows", "1"); + uriBuilder.addParameter("start", "0"); + uriBuilder.addParameter("fl", this.resultFieldList); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + return jsonNode.at("/response/numFound").asInt(); + } catch 
(URISyntaxException e) { + e.printStackTrace(); + } + return 0; + } + + public List search(String query, Integer start, Integer count, String token) { + List adsResults = new ArrayList<>(); + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + token); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("q", query); + uriBuilder.addParameter("rows", count.toString()); + uriBuilder.addParameter("start", start.toString()); + uriBuilder.addParameter("fl", this.resultFieldList); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + JsonNode docs = jsonNode.at("/response/docs"); + if (docs.isArray()) { + Iterator nodes = docs.elements(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + adsResults.add(transformSourceRecords(node.toString())); + } + } else { + adsResults.add(transformSourceRecords(docs.toString())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return adsResults; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + + public void setResultFieldList(String resultFieldList) { + this.resultFieldList = resultFieldList; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java index ed5ac5960b8b..7bd42cf07a4c 100644 --- 
a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java @@ -9,10 +9,10 @@ import java.util.Collection; -import org.apache.axiom.om.OMElement; import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor; +import org.jdom2.Element; /** * Arxiv specific implementation of {@link MetadataContributor} @@ -32,7 +32,7 @@ public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor * @return a collection of import records. Only the identifier of the found records may be put in the record. */ @Override - public Collection contributeMetadata(OMElement t) { + public Collection contributeMetadata(Element t) { Collection values = super.contributeMetadata(t); parseValue(values); return values; diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java index 6b418423fac6..96689e62ba75 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java @@ -7,8 +7,10 @@ */ package org.dspace.importer.external.arxiv.service; +import java.io.IOException; import java.io.StringReader; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.concurrent.Callable; @@ -20,10 +22,6 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.axiom.om.OMElement; 
-import org.apache.axiom.om.OMXMLBuilderFactory; -import org.apache.axiom.om.OMXMLParserWrapper; -import org.apache.axiom.om.xpath.AXIOMXPath; import org.apache.commons.lang3.StringUtils; import org.dspace.content.Item; import org.dspace.importer.external.datamodel.ImportRecord; @@ -31,7 +29,14 @@ import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.components.QuerySource; -import org.jaxen.JaxenException; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; /** * Implements a data source for querying ArXiv @@ -39,7 +44,7 @@ * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) * */ -public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService +public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService implements QuerySource { private WebTarget webTarget; @@ -213,15 +218,20 @@ public Integer call() throws Exception { Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(responseString)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList(Namespace.getNamespace("opensearch", + "http://a9.com/-/spec/opensearch/1.1/")); + XPathExpression xpath = + XPathFactory.instance().compile("opensearch:totalResults", Filters.element(), null, namespaces); + + 
Element count = xpath.evaluateFirst(root); try { - xpath = new AXIOMXPath("opensearch:totalResults"); - xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"); - OMElement count = (OMElement) xpath.selectSingleNode(element); return Integer.parseInt(count.getText()); - } catch (JaxenException e) { + } catch (NumberFormatException e) { return null; } } else { @@ -274,8 +284,8 @@ public List call() throws Exception { Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - List omElements = splitToRecords(responseString); - for (OMElement record : omElements) { + List elements = splitToRecords(responseString); + for (Element record : elements) { results.add(transformSourceRecords(record)); } return results; @@ -321,8 +331,8 @@ public List call() throws Exception { Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - List omElements = splitToRecords(responseString); - for (OMElement record : omElements) { + List elements = splitToRecords(responseString); + for (Element record : elements) { results.add(transformSourceRecords(record)); } return results; @@ -359,8 +369,8 @@ public List call() throws Exception { Response response = invocationBuilder.get(); if (response.getStatus() == 200) { String responseString = response.readEntity(String.class); - List omElements = splitToRecords(responseString); - for (OMElement record : omElements) { + List elements = splitToRecords(responseString); + for (Element record : elements) { results.add(transformSourceRecords(record)); } return results; @@ -387,16 +397,21 @@ private String getQuery(Query query) { } } - private List splitToRecords(String recordsSrc) { - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; + private List 
splitToRecords(String recordsSrc) { + try { - xpath = new AXIOMXPath("ns:entry"); - xpath.addNamespace("ns", "http://www.w3.org/2005/Atom"); - List recordsList = xpath.selectNodes(element); + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList(Namespace.getNamespace("ns", + "http://www.w3.org/2005/Atom")); + XPathExpression xpath = + XPathFactory.instance().compile("ns:entry", Filters.element(), null, namespaces); + + List recordsList = xpath.evaluate(root); return recordsList; - } catch (JaxenException e) { + } catch (JDOMException | IOException e) { return null; } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java index 7468d601f538..0014088c8650 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/bibtex/service/BibtexImportMetadataSourceServiceImpl.java @@ -70,11 +70,24 @@ protected List readData (InputStream keyValueItem.setKey(entry.getValue().getType().getValue()); keyValueItem.setValue(entry.getKey().getValue()); keyValues.add(keyValueItem); + PlainMetadataKeyValueItem typeItem = new PlainMetadataKeyValueItem(); + typeItem.setKey("type"); + typeItem.setValue(entry.getValue().getType().getValue()); + keyValues.add(typeItem); if (entry.getValue().getFields() != null) { for (Entry subentry : entry.getValue().getFields().entrySet()) { PlainMetadataKeyValueItem innerItem = new PlainMetadataKeyValueItem(); - innerItem.setKey(subentry.getKey().getValue()); - innerItem.setValue(subentry.getValue().toUserString()); + innerItem.setKey(subentry.getKey().getValue().toLowerCase()); + String latexString = 
subentry.getValue().toUserString(); + try { + org.jbibtex.LaTeXParser laTeXParser = new org.jbibtex.LaTeXParser(); + List latexObjects = laTeXParser.parse(latexString); + org.jbibtex.LaTeXPrinter laTeXPrinter = new org.jbibtex.LaTeXPrinter(); + String plainTextString = laTeXPrinter.print(latexObjects); + innerItem.setValue(plainTextString.replaceAll("\n", " ")); + } catch (ParseException e) { + innerItem.setValue(latexString); + } keyValues.add(innerItem); } } @@ -92,10 +105,10 @@ private BibTeXDatabase parseBibTex(InputStream inputStream) throws IOException, /** - * Retrieve the MetadataFieldMapping containing the mapping between RecordType + * Set the MetadataFieldMapping containing the mapping between RecordType * (in this case PlainMetadataSourceDto.class) and Metadata * - * @return The configured MetadataFieldMapping + * @param metadataFieldMap The configured MetadataFieldMapping */ @Override @SuppressWarnings("unchecked") diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiFieldMapping.java new file mode 100644 index 000000000000..f266ff3d8512 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.cinii; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Cinii metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class CiniiFieldMapping extends 
AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "ciniiMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..587ad5b25838 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/cinii/CiniiImportMetadataSourceServiceImpl.java @@ -0,0 +1,453 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.cinii; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import 
org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.dspace.services.ConfigurationService; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Cinii + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String urlSearch; + + @Autowired + private LiveImportClient liveImportClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String getImportSource() { + return "cinii"; + } + + @Override + public void init() throws Exception {} + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isNotEmpty(records) ? 
records.get(0) : null; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isNotEmpty(records) ? records.get(0) : null; + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Cinii"); + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + + /** + * This class is a Callable implementation to get CiNii entries based on + * query object. + * + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List records = new LinkedList(); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String queryString = query.getParameterAsClass("query", String.class); + String appId = configurationService.getProperty("cinii.appid"); + List ids = getCiniiIds(appId, count, null, null, null, start, queryString); + if (CollectionUtils.isNotEmpty(ids)) { + for (String id : ids) { + List tmp = search(id, appId); + if (CollectionUtils.isNotEmpty(tmp)) { + tmp.forEach(x -> x.addValue(createIdentifier(id))); + } + records.addAll(tmp); + } + } + return records; + } + } + + /** + * This class is a Callable implementation to get an CiNii entry using CiNii ID + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + String appId = configurationService.getProperty("cinii.appid"); + String id = query.getParameterAsClass("id", String.class); + List importRecord = search(id, appId); + if (CollectionUtils.isNotEmpty(importRecord)) { + importRecord.forEach(x -> x.addValue(createIdentifier(id))); + } + return importRecord; + } + } + + /** + * This class is a 
Callable implementation to search CiNii entries + * using author, title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + List records = new LinkedList(); + String title = query.getParameterAsClass("title", String.class); + String author = query.getParameterAsClass("author", String.class); + Integer year = query.getParameterAsClass("year", Integer.class); + Integer maxResult = query.getParameterAsClass("maxResult", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String appId = configurationService.getProperty("cinii.appid"); + List ids = getCiniiIds(appId, maxResult, author, title, year, start, null); + if (CollectionUtils.isNotEmpty(ids)) { + for (String id : ids) { + List importRecords = search(id, appId); + if (CollectionUtils.isNotEmpty(importRecords)) { + importRecords.forEach(x -> x.addValue(createIdentifier(id))); + } + records.addAll(importRecords); + } + } + return records; + } + + } + + /** + * This class is a Callable implementation to count the number + * of entries for an CiNii query. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + String appId = configurationService.getProperty("cinii.appid"); + String queryString = query.getParameterAsClass("query", String.class); + return countCiniiElement(appId, null, null, null, null, null, queryString); + } + } + + /** + * Get metadata by searching CiNii RDF API with CiNii NAID + * + * @param id CiNii NAID to search by + * @param appId registered application identifier for the API + * @return record metadata + * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. + * @throws HttpException Represents a XML/HTTP fault and provides access to the HTTP status code. 
+ */ + protected List search(String id, String appId) + throws IOException, HttpException { + try { + List records = new LinkedList(); + URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId); + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + List elements = splitToRecords(response); + for (Element record : elements) { + records.add(transformSourceRecords(record)); + } + return records; + } catch (URISyntaxException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + return root.getChildren(); + } catch (JDOMException | IOException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Returns a list of uri links (for example:https://cir.nii.ac.jp/crid/123456789) + * to the searched CiNii articles + * + * @param appId Application ID + * @param maxResult The number of search results per page + * @param author Author name + * @param title Article name + * @param year Year of publication + * @param start Start number for the acquired search result list + * @param query Keyword to be searched + */ + private List getCiniiIds(String appId, Integer maxResult, String author, String title, + Integer year, Integer start, String query) { + try { + List ids = new ArrayList<>(); + URIBuilder uriBuilder = new URIBuilder(this.urlSearch); + uriBuilder.addParameter("format", "rss"); + if (StringUtils.isNotBlank(appId)) { + uriBuilder.addParameter("appid", appId); + } + if (Objects.nonNull(maxResult) && maxResult != 0) { + 
uriBuilder.addParameter("count", maxResult.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("start", start.toString()); + } + if (StringUtils.isNotBlank(title)) { + uriBuilder.addParameter("title", title); + } + if (StringUtils.isNotBlank(author)) { + uriBuilder.addParameter("author", author); + } + if (StringUtils.isNotBlank(query)) { + uriBuilder.addParameter("q", query); + } + if (Objects.nonNull(year) && year != -1 && year != 0) { + uriBuilder.addParameter("year_from", String.valueOf(year)); + uriBuilder.addParameter("year_to", String.valueOf(year)); + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + int url_len = this.url.length() - 1; + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + List namespaces = Arrays.asList( + Namespace.getNamespace("ns", "http://purl.org/rss/1.0/"), + Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")); + XPathExpression xpath = XPathFactory.instance().compile("//ns:item/@rdf:about", + Filters.attribute(), null, namespaces); + List recordsList = xpath.evaluate(root); + for (Attribute item : recordsList) { + String value = item.getValue(); + if (value.length() > url_len) { + ids.add(value.substring(url_len + 1)); + } + } + return ids; + } catch (JDOMException | IOException | URISyntaxException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Returns the total number of CiNii articles returned by a specific query + * + * @param appId Application ID + * @param maxResult The number of search results per page + * @param author Author name + * @param title Article name + * @param year Year of 
publication + * @param start Start number for the acquired search result list + * @param query Keyword to be searched + */ + private Integer countCiniiElement(String appId, Integer maxResult, String author, String title, + Integer year, Integer start, String query) { + try { + URIBuilder uriBuilder = new URIBuilder(this.urlSearch); + uriBuilder.addParameter("format", "rss"); + uriBuilder.addParameter("appid", appId); + if (Objects.nonNull(maxResult) && maxResult != 0) { + uriBuilder.addParameter("count", maxResult.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("start", start.toString()); + } + if (StringUtils.isNotBlank(title)) { + uriBuilder.addParameter("title", title); + } + if (StringUtils.isNotBlank(author)) { + uriBuilder.addParameter("author", author); + } + if (StringUtils.isNotBlank(query)) { + uriBuilder.addParameter("q", query); + } + if (Objects.nonNull(year) && year != -1 && year != 0) { + uriBuilder.addParameter("year_from", String.valueOf(year)); + uriBuilder.addParameter("year_to", String.valueOf(year)); + } + + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + List namespaces = Arrays + .asList(Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/")); + XPathExpression xpath = XPathFactory.instance().compile("//opensearch:totalResults", + Filters.element(), null, namespaces); + List nodes = xpath.evaluate(root); + if (nodes != null && !nodes.isEmpty()) { + return Integer.parseInt(((Element) nodes.get(0)).getText()); + } + return 0; + } catch (JDOMException | IOException | URISyntaxException e) { + log.error(e.getMessage(), 
e); + throw new RuntimeException(e.getMessage(), e); + } + } + + private MetadatumDTO createIdentifier(String id) { + MetadatumDTO metadatumDTO = new MetadatumDTO(); + metadatumDTO.setSchema("dc"); + metadatumDTO.setElement("identifier"); + metadatumDTO.setQualifier("other"); + metadatumDTO.setValue(id); + return metadatumDTO; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java new file mode 100644 index 000000000000..abf84f52d058 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; + +/** + * This class is used for CrossRef's Live-Import to extract + * attributes such as "given" and "family" from the array of authors/editors + * and return them concatenated. + * Beans are configured in the crossref-integration.xml file. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class CrossRefAuthorMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator authors = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (authors.hasNext()) { + JsonNode author = authors.next(); + String givenName = author.at("/given").textValue(); + String familyName = author.at("/family").textValue(); + if (StringUtils.isNoneBlank(givenName) && StringUtils.isNoneBlank(familyName)) { + values.add(givenName + " " + familyName); + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java new file mode 100644 index 000000000000..c83abbf2b285 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import 
java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; + +/** + * This class is used for CrossRef's Live-Import to extract + * issued attribute. + * Beans are configured in the {@code crossref-integration.xml} file. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator dates = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (dates.hasNext()) { + JsonNode date = dates.next(); + LocalDate issuedDate = null; + DateTimeFormatter issuedDateFormat = null; + if (date.has(0) && date.has(1) && date.has(2)) { + issuedDate = LocalDate.of( + date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + date.get(2).numberValue().intValue()); + issuedDateFormat = DateTimeFormatter.ISO_LOCAL_DATE; + } else if (date.has(0) && date.has(1)) { + issuedDate = LocalDate.of(date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + 1); + issuedDateFormat = DateTimeFormatter.ofPattern("yyyy-MM"); + } else if (date.has(0)) { + issuedDate = LocalDate.of(date.get(0).numberValue().intValue(), + 1, + 1); + issuedDateFormat = DateTimeFormatter.ofPattern("yyyy"); + } + values.add(issuedDate.format(issuedDateFormat)); + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); 
+ JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java new file mode 100644 index 000000000000..5e879b4d266e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the CrossRef metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class CrossRefFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "crossrefMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..71b088ff162b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java @@ -0,0 +1,342 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.net.URLDecoder; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import 
org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying CrossRef + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "crossref"; + } + + @Override + public void init() throws Exception {} + + @Override + public ImportRecord getRecord(String recordId) throws MetadataSourceException { + String id = getID(recordId); + List records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByIdCallable(recordId)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + String id = getID(query); + return StringUtils.isNotBlank(id) ? retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? 
retry(new SearchByIdCallable(id)) + : retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + if (StringUtils.isNotBlank(id)) { + return retry(new SearchByIdCallable(id)); + } + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + List records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for CrossRef"); + } + + public String getID(String id) { + return DoiCheck.isDoi(id) ? "filter=doi:" + id : StringUtils.EMPTY; + } + + /** + * This class is a Callable implementation to get CrossRef entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); + if (Objects.nonNull(count)) { + uriBuilder.addParameter("rows", count.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("offset", start.toString()); + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(response); + Iterator nodes = jsonNode.at("/message/items").iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + if (!node.isMissingNode()) { + results.add(transformSourceRecords(node.toString())); + } + } + return results; + } + + } + + /** + * This class is a Callable implementation to get an CrossRef entry using DOI + * The DOI to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String ID = URLDecoder.decode(query.getParameterAsClass("id", String.class), "UTF-8"); + URIBuilder uriBuilder = new URIBuilder(url + "/" + ID); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + JsonNode messageNode = jsonNode.at("/message"); + if (!messageNode.isMissingNode()) { + results.add(transformSourceRecords(messageNode.toString())); + } + return results; + } + } + + /** + * This class is a Callable implementation to search CrossRef entries using author and title. + * There are two field in the Query map to pass, with keys "title" and "author" + * (at least one must be used). 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + String queryValue = query.getParameterAsClass("query", String.class); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + String bibliographics = query.getParameterAsClass("bibliographics", String.class); + List results = new ArrayList<>(); + URIBuilder uriBuilder = new URIBuilder(url); + if (Objects.nonNull(queryValue)) { + uriBuilder.addParameter("query", queryValue); + } + if (Objects.nonNull(count)) { + uriBuilder.addParameter("rows", count.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("offset", start.toString()); + } + if (Objects.nonNull(author)) { + uriBuilder.addParameter("query.author", author); + } + if (Objects.nonNull(title )) { + uriBuilder.addParameter("query.container-title", title); + } + if (Objects.nonNull(bibliographics)) { + uriBuilder.addParameter("query.bibliographic", bibliographics); + } + Map> params = new HashMap>(); + String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + Iterator nodes = jsonNode.at("/message/items").iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + if (!node.isMissingNode()) { + results.add(transformSourceRecords(node.toString())); + } + } + return results; + } + + } + + /** + * This class is a Callable implementation to count the number of entries for an CrossRef query. + * This Callable use as query value to CrossRef the string queryString passed to constructor. 
+ * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + + private Query query; + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + return jsonNode.at("/message/total-results").asInt(); + } + } + + /** + * This class is a Callable implementation to check if exist an CrossRef entry using DOI. + * The DOI to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * return 1 if CrossRef entry exists otherwise 0 + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class DoiCheckCallable implements Callable { + + private final Query query; + + private DoiCheckCallable(final String id) { + final Query query = new Query(); + query.addParameter("id", id); + this.query = query; + } + + private DoiCheckCallable(final Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + "/" + query.getParameterAsClass("id", String.class)); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + return StringUtils.equals(jsonNode.at("/status").toString(), "ok") ? 1 : 0; + } + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteFieldMapping.java new file mode 100644 index 000000000000..f8540307b916 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteFieldMapping.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.datacite; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link 
AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the datacite metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * @author Florian Gantner (florian.gantner@uni-bamberg.de) + */ +public class DataCiteFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "dataciteMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..a11f2bc2471d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/datacite/DataCiteImportMetadataSourceServiceImpl.java @@ -0,0 +1,168 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.datacite; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Datacite + * Mainly copied from CrossRefImportMetadataSourceServiceImpl. + * + * optional Affiliation informations are not part of the API request. + * https://support.datacite.org/docs/can-i-see-more-detailed-affiliation-information-in-the-rest-api + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + * @author Florian Gantner (florian.gantner@uni-bamberg.de) + * + */ +public class DataCiteImportMetadataSourceServiceImpl + extends AbstractImportMetadataSourceService implements QuerySource { + private final static Logger log = LogManager.getLogger(); + + @Autowired + private LiveImportClient liveImportClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String getImportSource() { + return "datacite"; + } + + @Override + public void init() throws Exception { + } + + @Override + public ImportRecord getRecord(String recordId) throws MetadataSourceException { + Collection records = getRecords(recordId, 0, 1); + if (records.size() == 0) { + return null; + } + return records.stream().findFirst().get(); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + Collection records = 
getRecords(query, 0, -1); + return records == null ? 0 : records.size(); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecordsCount(StringUtils.isBlank(id) ? query.toString() : id); + } + + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + List records = new ArrayList<>(); + String id = getID(query); + Map> params = new HashMap<>(); + Map uriParameters = new HashMap<>(); + params.put("uriParameters", uriParameters); + if (StringUtils.isBlank(id)) { + id = query; + } + uriParameters.put("query", id); + int timeoutMs = configurationService.getIntProperty("datacite.timeout", 180000); + String url = configurationService.getProperty("datacite.url", "https://api.datacite.org/dois/"); + String responseString = liveImportClient.executeHttpGetRequest(timeoutMs, url, params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + if (jsonNode == null) { + log.warn("DataCite returned invalid JSON"); + return records; + } + JsonNode dataNode = jsonNode.at("/data"); + if (dataNode.isArray()) { + Iterator iterator = dataNode.iterator(); + while (iterator.hasNext()) { + JsonNode singleDoiNode = iterator.next(); + String json = singleDoiNode.at("/attributes").toString(); + records.add(transformSourceRecords(json)); + } + } else { + String json = dataNode.at("/attributes").toString(); + records.add(transformSourceRecords(json)); + } + + return records; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecords(StringUtils.isBlank(id) ? 
query.toString() : id, 0, -1); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecord(StringUtils.isBlank(id) ? query.toString() : id); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return getRecords(StringUtils.isBlank(id) ? query.toString() : id, 0, -1); + } + + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for DataCite"); + } + + public String getID(String query) { + if (DoiCheck.isDoi(query)) { + return query; + } + // Workaround for encoded slashes. + if (query.contains("%252F")) { + query = query.replace("%252F", "/"); + } + if (DoiCheck.isDoi(query)) { + return query; + } + return StringUtils.EMPTY; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java index f5d3a6f722f4..3fc34dc51102 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/ImportRecord.java @@ -7,9 +7,9 @@ */ package org.dspace.importer.external.datamodel; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.LinkedList; import java.util.List; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -38,7 +38,7 @@ public List getValueList() { */ public ImportRecord(List valueList) { //don't want to alter the original list. 
Also now I can control the type of list - this.valueList = new LinkedList<>(valueList); + this.valueList = new ArrayList<>(valueList); } /** @@ -81,7 +81,7 @@ public String toString() { * @return the MetadatumDTO's that are related to a given schema/element/qualifier pair/triplet */ public Collection getValue(String schema, String element, String qualifier) { - List values = new LinkedList(); + List values = new ArrayList(); for (MetadatumDTO value : valueList) { if (value.getSchema().equals(schema) && value.getElement().equals(element)) { if (qualifier == null && value.getQualifier() == null) { diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java new file mode 100644 index 000000000000..64ec53ffb92b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.epo.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Epo metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class EpoFieldMapping extends AbstractMetadataFieldMapping { + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. 
+ * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "epoMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..fbae302bca6a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java @@ -0,0 +1,547 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.epo.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.xerces.impl.dv.util.Base64; +import org.dspace.content.Item; +import 
org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.contributor.EpoIdMetadataContributor.EpoDocumentId; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.jaxen.JaxenException; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying EPO + * + * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) + */ +public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String authUrl; + private String searchUrl; + + private String consumerKey; + private String consumerSecret; + + private MetadataFieldConfig dateFiled; + private MetadataFieldConfig applicationNumber; + + public static final String APP_NO_DATE_SEPARATOR = "$$$"; + private static final String APP_NO_DATE_SEPARATOR_REGEX = "\\$\\$\\$"; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. 
Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "epo"; + } + + /** + * Set the customer epo key + * @param consumerKey the customer consumer key + */ + public void setConsumerKey(String consumerKey) { + this.consumerKey = consumerKey; + } + + public String getConsumerKey() { + return consumerKey; + } + + /** + * Set the costumer epo secret + * @param consumerSecret the customer epo secret + */ + public void setConsumerSecret(String consumerSecret) { + this.consumerSecret = consumerSecret; + } + + public String getConsumerSecret() { + return consumerSecret; + } + + public void setDateFiled(MetadataFieldConfig dateFiled) { + this.dateFiled = dateFiled; + } + + public MetadataFieldConfig getDateFiled() { + return dateFiled; + } + + public void setApplicationNumber(MetadataFieldConfig applicationNumber) { + this.applicationNumber = applicationNumber; + } + + public MetadataFieldConfig getApplicationNumber() { + return applicationNumber; + } + + /*** + * Log to EPO, bearer is valid for 20 minutes + * + * @param consumerKey The consumer Key + * @param consumerSecretKey The consumer secret key + * @return + * @throws IOException + * @throws HttpException + */ + protected String login() throws IOException, HttpException { + Map> params = getLoginParams(); + String entity = "grant_type=client_credentials"; + String json = liveImportClient.executeHttpPostRequest(this.authUrl, params, entity); + ObjectMapper mapper = new ObjectMapper(new JsonFactory()); + JsonNode rootNode = mapper.readTree(json); + JsonNode accessTokenNode = rootNode.get("access_token"); + return accessTokenNode.asText(); + } + + private Map> getLoginParams() { + Map> params = new HashMap>(); + Map headerParams = getLoginHeaderParams(); + params.put(HEADER_PARAMETERS, headerParams); + return params; + } + + private Map getLoginHeaderParams() { + Map params = new HashMap(); + String authString = consumerKey + ":" + consumerSecret; + 
params.put("Authorization", "Basic " + Base64.encode(authString.getBytes())); + params.put("Content-type", "application/x-www-form-urlencoded"); + return params; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new CountRecordsCallable(query, bearer)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return 0; + + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new CountRecordsCallable(query, bearer)); + } catch (IOException | HttpException e) { + e.printStackTrace(); + } + } + return 0; + } + + @Override + public Collection getRecords(String query, int start, + int count) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new SearchByQueryCallable(query, bearer, start, count)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return new ArrayList(); + } + + @Override + public Collection getRecords(Query query) + throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new SearchByQueryCallable(query, bearer)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return new ArrayList(); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && 
StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + List list = retry(new SearchByIdCallable(id, bearer)); + return CollectionUtils.isNotEmpty(list) ? list.get(0) : null; + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return null; + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + return null; + } + + @Override + public Collection findMatchingRecords(Item item) + throws MetadataSourceException { + return null; + } + + @Override + public Collection findMatchingRecords(Query query) + throws MetadataSourceException { + return null; + } + + /** + * This class is a Callable implementation to count the number of entries for an EPO query. + * This Callable use as query value to EPO the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountRecordsCallable implements Callable { + + private String bearer; + private String query; + + private CountRecordsCallable(Query query, String bearer) { + this.query = query.getParameterAsClass("query", String.class); + this.bearer = bearer; + } + + private CountRecordsCallable(String query, String bearer) { + this.query = query; + this.bearer = bearer; + } + + public Integer call() throws Exception { + return countDocument(bearer, query); + } + } + + /** + * This class is a Callable implementation to get an EPO entry using epodocID (epodoc:AB1234567T) + * The epodocID to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + + private String id; + private String bearer; + + private SearchByIdCallable(String id, String bearer) { + this.id = id; + this.bearer = bearer; + } + + public List call() throws Exception { + int positionToSplit = id.indexOf(":"); + String docType = EpoDocumentId.EPODOC; + String idS = id; + if (positionToSplit != -1) { + docType = id.substring(0, positionToSplit); + idS = id.substring(positionToSplit + 1, id.length()); + } else if (id.contains(APP_NO_DATE_SEPARATOR)) { + // special case the id is the combination of the applicationnumber and date filed + String query = "applicationnumber=" + id.split(APP_NO_DATE_SEPARATOR_REGEX)[0]; + SearchByQueryCallable search = new SearchByQueryCallable(query, bearer, 0, 10); + List records = search.call().stream() + .filter(r -> r.getValue(dateFiled.getSchema(), dateFiled.getElement(), + dateFiled.getQualifier()) + .stream() + .anyMatch(m -> StringUtils.equals(m.getValue(), + id.split(APP_NO_DATE_SEPARATOR_REGEX)[1]) + )) + .limit(1).collect(Collectors.toList()); + return records; + } + List records = searchDocument(bearer, idS, docType); + if (records.size() > 1) { + log.warn("More record are returned with epocID " + id); + } + return records; + } + } + + /** + * This class is a Callable implementation to get EPO entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + private Integer start; + private Integer count; + private String bearer; + + private SearchByQueryCallable(Query query, String bearer) { + this.query = query; + this.bearer = bearer; + } + + public SearchByQueryCallable(String queryValue, String bearer, int start, int count) { + this.query = new Query(); + query.addParameter("query", queryValue); + this.start = query.getParameterAsClass("start", Integer.class) != null ? + query.getParameterAsClass("start", Integer.class) : 0; + this.count = query.getParameterAsClass("count", Integer.class) != null ? + query.getParameterAsClass("count", Integer.class) : 20; + this.bearer = bearer; + } + + @Override + public List call() throws Exception { + List records = new ArrayList(); + String queryString = query.getParameterAsClass("query", String.class); + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + if (StringUtils.isNotBlank(queryString) && StringUtils.isNotBlank(bearer)) { + List epoDocIds = searchDocumentIds(bearer, queryString, start + 1, count); + for (EpoDocumentId epoDocId : epoDocIds) { + List recordfounds = searchDocument(bearer, epoDocId); + if (recordfounds.size() > 1) { + log.warn("More record are returned with epocID " + epoDocId.toString()); + } + records.addAll(recordfounds); + } + } + + } + return records; + } + } + + private Integer countDocument(String bearer, String query) { + if (StringUtils.isBlank(bearer)) { + return null; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + headerParameters.put("X-OPS-Range", "1-1"); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.searchUrl); + uriBuilder.addParameter("q", query); + + String response = 
liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + + String totalRes = getElement(root, namespaces, "//ops:biblio-search/@total-result-count"); + return Integer.parseInt(totalRes); + } catch (JDOMException | IOException | URISyntaxException | JaxenException e) { + log.error(e.getMessage(), e); + return null; + } + } + + private List searchDocumentIds(String bearer, String query, int start, int count) { + List results = new ArrayList(); + int end = start + count; + if (StringUtils.isBlank(bearer)) { + return results; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + if (start >= 1 && end > start) { + headerParameters.put("X-OPS-Range", start + "-" + end); + } + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.searchUrl); + uriBuilder.addParameter("q", query); + + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + 
Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance() + .compile("//ns:document-id", Filters.element(), null, namespaces); + + List documentIds = xpath.evaluate(root); + for (Element documentId : documentIds) { + results.add(new EpoDocumentId(documentId, namespaces)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return results; + } + + private List searchDocument(String bearer, EpoDocumentId id) { + return searchDocument(bearer, id.getId(), id.getDocumentIdType()); + } + + private List searchDocument(String bearer, String id, String docType) { + List results = new ArrayList(); + if (StringUtils.isBlank(bearer)) { + return results; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + params.put(HEADER_PARAMETERS, headerParameters); + + String url = this.url.replace("$(doctype)", docType).replace("$(id)", id); + + String response = liveImportClient.executeHttpGetRequest(1000, url, params); + List elements = splitToRecords(response); + for (Element element : elements) { + results.add(transformSourceRecords(element)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return results; + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + List namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance().compile("//ns:exchange-document", + Filters.element(), null, namespaces); + + List recordsList = xpath.evaluate(root); + return 
recordsList; + } catch (JDOMException | IOException e) { + log.error(e.getMessage(), e); + return new LinkedList(); + } + } + + private String getElement(Element document, List namespaces, String path) throws JaxenException { + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, namespaces); + List nodes = xpath.evaluate(document); + //exactly one element expected for any field + if (CollectionUtils.isEmpty(nodes)) { + return StringUtils.EMPTY; + } else { + return getValue(nodes.get(0)); + } + } + + private String getValue(Object el) { + if (el instanceof Element) { + return ((Element) el).getText(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String)el; + } else if (el instanceof Text) { + return ((Text) el).getText(); + } else { + log.error("node of type: " + el.getClass()); + return ""; + } + } + + public void setUrl(String url) { + this.url = url; + } + + public void setAuthUrl(String authUrl) { + this.authUrl = authUrl; + } + + public void setSearchUrl(String searchUrl) { + this.searchUrl = searchUrl; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java new file mode 100644 index 000000000000..a1132cda9ce2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.liveimportclient.service; + +import java.util.Map; + +/** + * Interface for classes that allow to contact LiveImport clients. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public interface LiveImportClient { + + /** + * Http GET request + * + * @param timeout The connect timeout in milliseconds + * @param URL URL + * @param params This map contains the parameters to be included in the request. + * Each parameter will be added to the url?(key=value) + * @return The response in String type converted from InputStream + */ + public String executeHttpGetRequest(int timeout, String URL, Map> params); + + /** + * Http POST request + * + * @param URL URL + * @param params This map contains the header params to be included in the request. + * @param entry the entity value + * @return the response in String type converted from InputStream + */ + public String executeHttpPostRequest(String URL, Map> params, String entry); +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java new file mode 100644 index 000000000000..1a8a7a7861ed --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java @@ -0,0 +1,194 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.liveimportclient.service; + +import java.io.InputStream; +import java.net.URISyntaxException; +import java.nio.charset.Charset; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import 
org.apache.http.client.config.RequestConfig; +import org.apache.http.client.config.RequestConfig.Builder; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link LiveImportClient}. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com) + */ +public class LiveImportClientImpl implements LiveImportClient { + + private final static Logger log = LogManager.getLogger(); + + public static final String URI_PARAMETERS = "uriParameters"; + public static final String HEADER_PARAMETERS = "headerParameters"; + + private CloseableHttpClient httpClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String executeHttpGetRequest(int timeout, String URL, Map> params) { + HttpGet method = null; + try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) + .orElseGet(HttpClients::createDefault)) { + + Builder requestConfigBuilder = RequestConfig.custom(); + requestConfigBuilder.setConnectionRequestTimeout(timeout); + RequestConfig defaultRequestConfig = requestConfigBuilder.build(); + + String uri = buildUrl(URL, params.get(URI_PARAMETERS)); + method = new HttpGet(uri); + method.setConfig(defaultRequestConfig); + + Map headerParams = params.get(HEADER_PARAMETERS); + if (MapUtils.isNotEmpty(headerParams)) { + for (String param : headerParams.keySet()) { + method.setHeader(param, headerParams.get(param)); + } + } + + configureProxy(method, defaultRequestConfig); + if 
(log.isDebugEnabled()) { + log.debug("Performing GET request to \"" + uri + "\"..."); + } + HttpResponse httpResponse = httpClient.execute(method); + if (isNotSuccessfull(httpResponse)) { + throw new RuntimeException("The request failed with: " + getStatusCode(httpResponse) + " code, reason= " + + httpResponse.getStatusLine().getReasonPhrase()); + } + InputStream inputStream = httpResponse.getEntity().getContent(); + return IOUtils.toString(inputStream, Charset.defaultCharset()); + } catch (Exception e1) { + log.error(e1.getMessage(), e1); + } finally { + if (Objects.nonNull(method)) { + method.releaseConnection(); + } + } + return StringUtils.EMPTY; + } + + @Override + public String executeHttpPostRequest(String URL, Map> params, String entry) { + HttpPost method = null; + try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) + .orElseGet(HttpClients::createDefault)) { + + Builder requestConfigBuilder = RequestConfig.custom(); + RequestConfig defaultRequestConfig = requestConfigBuilder.build(); + + String uri = buildUrl(URL, params.get(URI_PARAMETERS)); + method = new HttpPost(uri); + method.setConfig(defaultRequestConfig); + if (StringUtils.isNotBlank(entry)) { + method.setEntity(new StringEntity(entry)); + } + setHeaderParams(method, params); + + configureProxy(method, defaultRequestConfig); + if (log.isDebugEnabled()) { + log.debug("Performing POST request to \"" + uri + "\"..." 
); + } + HttpResponse httpResponse = httpClient.execute(method); + if (isNotSuccessfull(httpResponse)) { + throw new RuntimeException(); + } + InputStream inputStream = httpResponse.getEntity().getContent(); + return IOUtils.toString(inputStream, Charset.defaultCharset()); + } catch (Exception e1) { + log.error(e1.getMessage(), e1); + } finally { + if (Objects.nonNull(method)) { + method.releaseConnection(); + } + } + return StringUtils.EMPTY; + } + + private void configureProxy(HttpRequestBase method, RequestConfig defaultRequestConfig) { + String proxyHost = configurationService.getProperty("http.proxy.host"); + String proxyPort = configurationService.getProperty("http.proxy.port"); + if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) { + RequestConfig requestConfig = RequestConfig.copy(defaultRequestConfig) + .setProxy(new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http")) + .build(); + method.setConfig(requestConfig); + } + } + + /** + * Allows to set the header parameters to the HTTP Post method + * + * @param method HttpPost method + * @param params This map contains the header params to be included in the request. + */ + private void setHeaderParams(HttpPost method, Map> params) { + Map headerParams = params.get(HEADER_PARAMETERS); + if (MapUtils.isNotEmpty(headerParams)) { + for (String param : headerParams.keySet()) { + method.setHeader(param, headerParams.get(param)); + } + } + } + + /** + * This method allows you to add the parameters contained in the requestParams map to the URL + * + * @param URL URL + * @param requestParams This map contains the parameters to be included in the request. 
+ * Each parameter will be added to the url?(key=value) + * @return + * @throws URISyntaxException + */ + private String buildUrl(String URL, Map requestParams) throws URISyntaxException { + URIBuilder uriBuilder = new URIBuilder(URL); + if (MapUtils.isNotEmpty(requestParams)) { + for (String param : requestParams.keySet()) { + uriBuilder.setParameter(param, requestParams.get(param)); + } + } + return uriBuilder.toString(); + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private int getStatusCode(HttpResponse response) { + return response.getStatusLine().getStatusCode(); + } + + public CloseableHttpClient getHttpClient() { + return httpClient; + } + + public void setHttpClient(CloseableHttpClient httpClient) { + this.httpClient = httpClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java index aed2f0e08444..d2f0df6d04b9 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/AbstractMetadataFieldMapping.java @@ -7,18 +7,19 @@ */ package org.dspace.importer.external.metadatamapping; +import java.util.ArrayList; import java.util.Collection; -import java.util.LinkedList; import java.util.List; import java.util.Map; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor; import org.dspace.importer.external.metadatamapping.transform.MetadataProcessorService; /** - * Abstract class that implements {@link MetadataFieldMapping} - * This class adds a default implementation for the MetadataFieldMapping methods + * Abstract class 
that implements {@link MetadataFieldMapping}. + * This class adds a default implementation for the MetadataFieldMapping methods. * * @author Roeland Dillen (roeland at atmire dot com) */ @@ -30,7 +31,7 @@ public abstract class AbstractMetadataFieldMapping /** * log4j logger */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(AbstractMetadataFieldMapping.class); + private static final Logger log = LogManager.getLogger(AbstractMetadataFieldMapping.class); /* A map containing what processing has to be done on a given metadataFieldConfig. * The processing of a value is used to determine the actual value that will be returned used. @@ -66,6 +67,7 @@ public MetadataProcessorService getMetadataProcessor(MetadataFieldConfig metadat * @param value The value to map to a MetadatumDTO * @return A metadatumDTO created from the field and value */ + @Override public MetadatumDTO toDCValue(MetadataFieldConfig field, String value) { MetadatumDTO dcValue = new MetadatumDTO(); @@ -108,14 +110,15 @@ public void setMetadataFieldMap(Map resultToDCValueMapping(RecordType record) { - List values = new LinkedList(); + List values = new ArrayList<>(); for (MetadataContributor query : getMetadataFieldMap().values()) { try { diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/MetadataFieldConfig.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/MetadataFieldConfig.java index d19939248c42..be3c85ab620e 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/MetadataFieldConfig.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/MetadataFieldConfig.java @@ -8,7 +8,7 @@ package org.dspace.importer.external.metadatamapping; /** - * A generalised configuration for metadatafields. + * A generalised configuration for metadata fields. * This is used to make the link between values and the actual MetadatumDTO object. 
* * @author Roeland Dillen (roeland at atmire dot com) @@ -31,7 +31,7 @@ public boolean equals(Object o) { if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (o == null || !(o instanceof MetadataFieldConfig)) { return false; } @@ -43,11 +43,7 @@ public boolean equals(Object o) { if (qualifier != null ? !qualifier.equals(that.qualifier) : that.qualifier != null) { return false; } - if (!schema.equals(that.schema)) { - return false; - } - - return true; + return schema.equals(that.schema); } /** diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java new file mode 100644 index 000000000000..b938a290c297 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This Processor allows to extract attribute values of an array. 
+ * For exaple to extract all values of secondAttribute, + * "array":[ + * { + * "firstAttribute":"first value", + * "secondAttribute":"second value" + * }, + * { + * "firstAttribute":"first value", + * "secondAttribute":"second value" + * } + * ] + * + * it's possible configure a bean with + * pathToArray=/array and elementAttribute=/secondAttribute + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class ArrayElementAttributeProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + private String elementAttribute; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator array = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (array.hasNext()) { + JsonNode element = array.next(); + String value = element.at(elementAttribute).textValue(); + if (StringUtils.isNoneBlank(value)) { + values.add(value); + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + + public void setElementAttribute(String elementAttribute) { + this.elementAttribute = elementAttribute; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/AuthorMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/AuthorMetadataContributor.java new file mode 100644 index 000000000000..26063dc7441d --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/AuthorMetadataContributor.java @@ -0,0 +1,173 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.apache.commons.lang.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jaxen.JaxenException; +import org.jdom2.Element; +import org.jdom2.Namespace; + +/** + * Scopus specific implementation of {@link MetadataContributor} + * Responsible for generating the ScopusID, orcid, author name and affiliationID + * from the retrieved item. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +public class AuthorMetadataContributor extends SimpleXpathMetadatumContributor { + + private static final Namespace NAMESPACE = Namespace.getNamespace("http://www.w3.org/2005/Atom"); + + private MetadataFieldConfig orcid; + private MetadataFieldConfig scopusId; + private MetadataFieldConfig authname; + private MetadataFieldConfig affiliation; + + private Map affId2affName = new HashMap(); + + /** + * Retrieve the metadata associated with the given object. + * Depending on the retrieved node (using the query), + * different types of values will be added to the MetadatumDTO list. + * + * @param element A class to retrieve metadata from. + * @return A collection of import records. Only the ScopusID, orcid, author name and affiliation + * of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List metadatums = null; + fillAffillation(element); + try { + List nodes = element.getChildren("author", NAMESPACE); + for (Element el : nodes) { + metadatums = getMetadataOfAuthors(el); + if (Objects.nonNull(metadatums)) { + for (MetadatumDTO metadatum : metadatums) { + values.add(metadatum); + } + } + } + } catch (JaxenException e) { + throw new RuntimeException(e); + } + return values; + } + + /** + * Retrieve the the ScopusID, orcid, author name and affiliationID + * metadata associated with the given element object. + * If the value retrieved from the element is empty + * it is set PLACEHOLDER_PARENT_METADATA_VALUE + * + * @param element A class to retrieve metadata from + * @throws JaxenException If Xpath evaluation failed + */ + private List getMetadataOfAuthors(Element element) throws JaxenException { + List metadatums = new ArrayList(); + Element authname = element.getChild("authname", NAMESPACE); + Element scopusId = element.getChild("authid", NAMESPACE); + Element orcid = element.getChild("orcid", NAMESPACE); + Element afid = element.getChild("afid", NAMESPACE); + + addMetadatum(metadatums, getMetadata(getElementValue(authname), this.authname)); + addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId)); + addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid)); + addMetadatum(metadatums, getMetadata(StringUtils.isNotBlank(afid.getValue()) + ? 
this.affId2affName.get(afid.getValue()) : null, this.affiliation)); + return metadatums; + } + + private void addMetadatum(List list, MetadatumDTO metadatum) { + if (Objects.nonNull(metadatum)) { + list.add(metadatum); + } + } + + private String getElementValue(Element element) { + if (Objects.nonNull(element)) { + return element.getValue(); + } + return StringUtils.EMPTY; + } + + private MetadatumDTO getMetadata(String value, MetadataFieldConfig metadaConfig) { + if (StringUtils.isBlank(value)) { + return null; + } + MetadatumDTO metadata = new MetadatumDTO(); + metadata.setElement(metadaConfig.getElement()); + metadata.setQualifier(metadaConfig.getQualifier()); + metadata.setSchema(metadaConfig.getSchema()); + metadata.setValue(value); + return metadata; + } + + private void fillAffillation(Element element) { + try { + List nodes = element.getChildren("affiliation", NAMESPACE); + for (Element el : nodes) { + fillAffiliation2Name(el); + } + } catch (JaxenException e) { + throw new RuntimeException(e); + } + } + + private void fillAffiliation2Name(Element element) throws JaxenException { + Element affilationName = element.getChild("affilname", NAMESPACE); + Element affilationId = element.getChild("afid", NAMESPACE); + if (Objects.nonNull(affilationId) && Objects.nonNull(affilationName)) { + affId2affName.put(affilationId.getValue(), affilationName.getValue()); + } + } + + public MetadataFieldConfig getAuthname() { + return authname; + } + + public void setAuthname(MetadataFieldConfig authname) { + this.authname = authname; + } + + public MetadataFieldConfig getOrcid() { + return orcid; + } + + public void setOrcid(MetadataFieldConfig orcid) { + this.orcid = orcid; + } + + public MetadataFieldConfig getScopusId() { + return scopusId; + } + + public void setScopusId(MetadataFieldConfig scopusId) { + this.scopusId = scopusId; + } + + public MetadataFieldConfig getAffiliation() { + return affiliation; + } + + public void setAffiliation(MetadataFieldConfig affiliation) 
{ + this.affiliation = affiliation; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java new file mode 100644 index 000000000000..e32f45a4d5f3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java @@ -0,0 +1,312 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Resource; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jaxen.JaxenException; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Required; + +/** + * Custom MetadataContributor to manage Epo ID. + * Need as input element and all children. + * + * @author Pasquale Cavallo + */ +public class EpoIdMetadataContributor implements MetadataContributor { + + protected MetadataFieldConfig field; + + private boolean needType; + + /** + * This property will be used in ID definition. 
+ * If this is true, id will be in the form docType:EpoID, otherwise EpoID will be returned + * + * @param needType if true, docType will be included in id definition + */ + public void setNeedType(boolean needType) { + this.needType = needType; + } + + /** + * Return prefixToNamespaceMapping + * + * @return a prefixToNamespaceMapping map + */ + public Map getPrefixToNamespaceMapping() { + return prefixToNamespaceMapping; + } + + protected MetadataFieldMapping> metadataFieldMapping; + + /** + * Return metadataFieldMapping + * + * @return MetadataFieldMapping + */ + public MetadataFieldMapping> getMetadataFieldMapping() { + return metadataFieldMapping; + } + + /** + * Set the metadataFieldMapping of this SimpleXpathMetadatumContributor + * + * @param metadataFieldMapping the new mapping. + */ + public void setMetadataFieldMapping( + MetadataFieldMapping> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + + /** + * Set the prefixToNamespaceMapping for this object, + * + * @param prefixToNamespaceMapping the new mapping. 
+ */ + @Resource(name = "isiFullprefixMapping") + public void setPrefixToNamespaceMapping(Map prefixToNamespaceMapping) { + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + } + + protected Map prefixToNamespaceMapping; + + /** + * Initialize EpoIdMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig + * + * @param query query string + * @param prefixToNamespaceMapping metadata prefix to namespace mapping + * @param field + * MetadataFieldConfig + */ + public EpoIdMetadataContributor(String query, Map prefixToNamespaceMapping, + MetadataFieldConfig field) { + this.query = query; + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + this.field = field; + } + + /** + * Empty constructor for EpoIdMetadataContributor + */ + public EpoIdMetadataContributor() { + + } + + protected String query; + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } + + /** + * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + @Required + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return query used to create an xpathExpression on, this query is used to + * + * @return the query this instance is based on + */ + public String getQuery() { + return query; + } + + @Required + public void setQuery(String query) { + this.query = query; + } + + /** + * Retrieve the metadata associated with the given object. + * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO + * list + * + * @param element A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + try { + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.element(), null, + namespaces); + List elements = xpath.evaluate(element); + for (Element el : elements) { + EpoDocumentId document = new EpoDocumentId(el, namespaces); + MetadatumDTO metadatum = new MetadatumDTO(); + metadatum.setElement(field.getElement()); + metadatum.setQualifier(field.getQualifier()); + metadatum.setSchema(field.getSchema()); + if (needType) { + metadatum.setValue(document.getIdAndType()); + } else { + metadatum.setValue(document.getId()); + } + values.add(metadatum); + } + return values; + } catch (JaxenException e) { + System.err.println(query); + throw new RuntimeException(e); + } + } + + /** + * This class maps EPO's response metadata needs to extract epo ID. 
+ * + * @author Pasquale Cavallo + * + */ + public static class EpoDocumentId { + + private String documentIdType; + private String country; + private String docNumber; + private String kind; + private String date; + private List namespaces; + + + public static final String DOCDB = "docdb"; + public static final String EPODOC = "epodoc"; + public static final String ORIGIN = "origin"; + + + public EpoDocumentId(Element documentId, List namespaces) throws JaxenException { + this.namespaces = namespaces; + Element preferredId = null; + XPathExpression xpath = XPathFactory.instance().compile( + "./ns:document-id[@document-id-type=\"epodoc\"]", Filters.fpassthrough(), null, namespaces); + + List nodes = xpath.evaluate(documentId); + if (CollectionUtils.isNotEmpty(nodes)) { + preferredId = (Element) nodes.get(0); + } + if (Objects.isNull(preferredId)) { + preferredId = documentId; + } + + this.documentIdType = buildDocumentIdType(preferredId); + this.country = buildCountry(preferredId); + this.docNumber = buildDocNumber(preferredId); + this.kind = buildKind(preferredId); + this.date = buildDate(preferredId); + } + + private String buildDocumentIdType(Element documentId) throws JaxenException { + return getElement(documentId, "./@document-id-type"); + } + + private String buildCountry(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:country"); + } + + private String buildDocNumber(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:doc-number"); + } + + private String buildKind(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:kind"); + } + + private String buildDate(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:date"); + } + + + public String getDocumentIdType() { + return documentIdType; + } + + /** + * This method compute the epo ID from fields + * + * @return the EPO id + */ + public String getId() { + if (DOCDB.equals(documentIdType)) { 
+ return country + "." + docNumber + "." + kind; + } else if (EPODOC.equals(documentIdType)) { + return docNumber + ((kind != null) ? kind : StringUtils.EMPTY); + } else { + return StringUtils.EMPTY; + } + } + + public String getIdAndType() { + if (EPODOC.equals(documentIdType)) { + return documentIdType + ":" + docNumber + ((kind != null) ? kind : ""); + } else if (DOCDB.equals(documentIdType)) { + return documentIdType + ":" + country + "." + docNumber + "." + kind; + } else { + return StringUtils.EMPTY; + } + } + + + private String getElement(Element documentId, String path) throws JaxenException { + if (Objects.isNull(documentId)) { + return StringUtils.EMPTY; + } + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(documentId); + //exactly one element expected for any field + return CollectionUtils.isNotEmpty(nodes) ? getValue(nodes.get(0)) : StringUtils.EMPTY; + } + + private String getValue(Object el) { + if (el instanceof Element) { + return ((Element) el).getText(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String)el; + } else if (el instanceof Text) { + return ((Text) el).getText(); + } else { + return StringUtils.EMPTY; + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java new file mode 100644 index 000000000000..2de0c6a0bbbc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java @@ -0,0 +1,23 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; + +/** + * Service interface class for processing json object. + * The implementation of this class is responsible for all business logic calls + * for extracting of values from json object. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface JsonPathMetadataProcessor { + + public Collection processMetadata(String json); + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java new file mode 100644 index 000000000000..c8e93971f480 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This Processor allows to extract all values of a matrix. 
+ * Only need to configure the path to the matrix in "pathToMatrix" + * For exaple to extract all values + * "matrix": [ + * [ + * "first", + * "second" + * ], + * [ + * "third" + * ], + * [ + * "fourth", + * "fifth" + * ] + * ], + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class MatrixElementProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToMatrix; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator array = rootNode.at(pathToMatrix).elements(); + Collection values = new ArrayList<>(); + while (array.hasNext()) { + JsonNode element = array.next(); + if (element.isArray()) { + Iterator nodes = element.iterator(); + while (nodes.hasNext()) { + String nodeValue = nodes.next().textValue(); + if (StringUtils.isNotBlank(nodeValue)) { + values.add(nodeValue); + } + } + } else { + String nodeValue = element.textValue(); + if (StringUtils.isNotBlank(nodeValue)) { + values.add(nodeValue); + } + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToMatrix(String pathToMatrix) { + this.pathToMatrix = pathToMatrix; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/PageRangeXPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/PageRangeXPathMetadataContributor.java new file mode 100644 index 000000000000..0bcb33d68948 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/PageRangeXPathMetadataContributor.java @@ -0,0 
+1,110 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; + +/** + * Scopus specific implementation of {@link MetadataContributor} + * Responsible for generating the Scopus startPage and endPage from the retrieved item. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com) + */ +public class PageRangeXPathMetadataContributor extends SimpleXpathMetadatumContributor { + + private MetadataFieldConfig startPageMetadata; + + private MetadataFieldConfig endPageMetadata; + + /** + * Retrieve the metadata associated with the given Element object. + * Depending on the retrieved node (using the query), + * StartPage and EndPage values will be added to the MetadatumDTO list + * + * @param el A class to retrieve metadata from. + * @return A collection of import records. Only the StartPage and EndPage + * of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(Element el) { + List values = new LinkedList<>(); + List metadatums = null; + for (String ns : prefixToNamespaceMapping.keySet()) { + List nodes = el.getChildren(query, Namespace.getNamespace(ns)); + for (Element element : nodes) { + metadatums = getMetadatum(element.getValue()); + if (Objects.nonNull(metadatums)) { + for (MetadatumDTO metadatum : metadatums) { + values.add(metadatum); + } + } + } + } + return values; + } + + private List getMetadatum(String value) { + List metadatums = new ArrayList(); + if (StringUtils.isBlank(value)) { + return null; + } + String [] range = value.split("-"); + if (range.length == 2) { + metadatums.add(setStartPage(range)); + metadatums.add(setEndPage(range)); + } else if (range.length != 0) { + metadatums.add(setStartPage(range)); + } + return metadatums; + } + + private MetadatumDTO setEndPage(String[] range) { + MetadatumDTO endPage = new MetadatumDTO(); + endPage.setValue(range[1]); + endPage.setElement(endPageMetadata.getElement()); + endPage.setQualifier(endPageMetadata.getQualifier()); + endPage.setSchema(endPageMetadata.getSchema()); + return endPage; + } + + private MetadatumDTO setStartPage(String[] range) { + MetadatumDTO startPage = new MetadatumDTO(); + startPage.setValue(range[0]); + startPage.setElement(startPageMetadata.getElement()); + startPage.setQualifier(startPageMetadata.getQualifier()); + startPage.setSchema(startPageMetadata.getSchema()); + return startPage; + } + + public MetadataFieldConfig getStartPageMetadata() { + return startPageMetadata; + } + + public void setStartPageMetadata(MetadataFieldConfig startPageMetadata) { + this.startPageMetadata = startPageMetadata; + } + + public MetadataFieldConfig getEndPageMetadata() { + return endPageMetadata; + } + + public void setEndPageMetadata(MetadataFieldConfig endPageMetadata) { + this.endPageMetadata = endPageMetadata; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ReplaceCharacterXPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ReplaceCharacterXPathMetadataContributor.java new file mode 100644 index 000000000000..9fb92348be0d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ReplaceCharacterXPathMetadataContributor.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; + +/** + * This contributor replace specific character in the metadata value. + * It is useful for some provider (e.g. Scopus) which use containing "/" character. + * Actually, "/" will never encode by framework in URL building. In the same ways, if we + * encode "/" -> %2F, it will be encoded by framework and become %252F. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com) + */ +public class ReplaceCharacterXPathMetadataContributor extends SimpleXpathMetadatumContributor { + + private char characterToBeReplaced; + + private char characterToReplaceWith; + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + for (String ns : prefixToNamespaceMapping.keySet()) { + List nodes = element.getChildren(query, Namespace.getNamespace(ns)); + for (Element el : nodes) { + values.add(getMetadatum(field, el.getValue())); + } + } + return values; + } + + private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) { + MetadatumDTO dcValue = new MetadatumDTO(); + if (Objects.isNull(field)) { + return null; + } + dcValue.setValue(value == null ? null : value.replace(characterToBeReplaced, characterToReplaceWith)); + dcValue.setElement(field.getElement()); + dcValue.setQualifier(field.getQualifier()); + dcValue.setSchema(field.getSchema()); + return dcValue; + } + + public void setCharacterToBeReplaced(int characterToBeReplaced) { + this.characterToBeReplaced = (char)characterToBeReplaced; + } + + public void setCharacterToReplaceWith(int characterToReplaceWith) { + this.characterToReplaceWith = (char)characterToReplaceWith; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleConcatContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleConcatContributor.java new file mode 100644 index 000000000000..d84bc65701c6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleConcatContributor.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ 
+package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor is able to concat multi value. + * Given a certain path, if it contains several nodes, + * the values of nodes will be concatenated into a single one. + * The concrete example we can see in the file wos-responce.xml in the node, + * which may contain several

    paragraphs, + * this Contributor allows concatenating all

    paragraphs. to obtain a single one. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class SimpleConcatContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + StringBuilder text = new StringBuilder(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = (Element) el; + if (StringUtils.isNotBlank(element.getText())) { + text.append(element.getText()); + } + } else { + log.warn("node of type: " + el.getClass()); + } + } + if (StringUtils.isNotBlank(text.toString())) { + values.add(metadataFieldMapping.toDCValue(field, text.toString())); + } + return values; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java new file mode 100644 index 000000000000..590fc63283b9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java @@ -0,0 +1,181 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; 
+import java.util.Objects; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * A simple JsonPath Metadata processor + * that allow extract value from json object + * by configuring the path in the query variable via the bean. + * moreover this can also perform more compact extractions + * by configuring specific json processor in "metadataProcessor" + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class SimpleJsonPathMetadataContributor implements MetadataContributor { + + private final static Logger log = LogManager.getLogger(); + + private String query; + + private MetadataFieldConfig field; + + protected JsonPathMetadataProcessor metadataProcessor; + + /** + * Initialize SimpleJsonPathMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig + * + * @param query The JSonPath query + * @param field the matadata field to map the result of the Json path query + * MetadataFieldConfig + */ + public SimpleJsonPathMetadataContributor(String query, MetadataFieldConfig field) { + this.query = query; + this.field = field; + } + + + /** + * Unused by this implementation + */ + @Override + public void setMetadataFieldMapping(MetadataFieldMapping> rt) { + + } + + /** + * Empty constructor for SimpleJsonPathMetadataContributor + */ + public SimpleJsonPathMetadataContributor() { + + } + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } 
+ + /** + * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return query used to create the JSonPath + * + * @return the query this instance is based on + */ + public String getQuery() { + return query; + } + + /** + * Return query used to create the JSonPath + * + */ + public void setQuery(String query) { + this.query = query; + } + + /** + * Used to process data got by jsonpath expression, like arrays to stringify, change date format or else + * If it is null, toString will be used. + * + * @param metadataProcessor + */ + public void setMetadataProcessor(JsonPathMetadataProcessor metadataProcessor) { + this.metadataProcessor = metadataProcessor; + } + + /** + * Retrieve the metadata associated with the given object. + * The toString() of the resulting object will be used. + * + * @param fullJson A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(String fullJson) { + Collection metadata = new ArrayList<>(); + Collection metadataValue = new ArrayList<>(); + if (Objects.nonNull(metadataProcessor)) { + metadataValue = metadataProcessor.processMetadata(fullJson); + } else { + JsonNode jsonNode = convertStringJsonToJsonNode(fullJson); + JsonNode node = jsonNode.at(query); + if (node.isArray()) { + Iterator nodes = node.iterator(); + while (nodes.hasNext()) { + String nodeValue = getStringValue(nodes.next()); + if (StringUtils.isNotBlank(nodeValue)) { + metadataValue.add(nodeValue); + } + } + } else if (!node.isNull() && StringUtils.isNotBlank(node.toString())) { + String nodeValue = getStringValue(node); + if (StringUtils.isNotBlank(nodeValue)) { + metadataValue.add(nodeValue); + } + } + } + for (String value : metadataValue) { + MetadatumDTO metadatumDto = new MetadatumDTO(); + metadatumDto.setValue(value); + metadatumDto.setElement(field.getElement()); + metadatumDto.setQualifier(field.getQualifier()); + metadatumDto.setSchema(field.getSchema()); + metadata.add(metadatumDto); + } + return metadata; + } + + private String getStringValue(JsonNode node) { + if (node.isTextual()) { + return node.textValue(); + } + if (node.isNumber()) { + return node.numberValue().toString(); + } + log.error("It wasn't possible to convert the value of the following JsonNode:" + node.asText()); + return StringUtils.EMPTY; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMultiplePathContributor.java 
b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMultiplePathContributor.java new file mode 100644 index 000000000000..57a329315168 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleMultiplePathContributor.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Web of Science specific implementation of {@link MetadataContributor}. + * This contributor can perform research on multi-paths. + * For example, to populate the subject metadata, in the Web of Science response + * the values are contained in different paths, + * so this Contributor allows you to collect the values by configuring the paths in the paths list. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class SimpleMultiplePathContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + private List paths; + + public SimpleMultiplePathContributor() {} + + public SimpleMultiplePathContributor(List paths) { + this.paths = paths; + } + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + for (String path : this.paths) { + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText())); + } else { + log.warn("node of type: " + el.getClass()); + } + } + } + return values; + } + + public List getPaths() { + return paths; + } + + public void setPaths(List paths) { + this.paths = paths; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java new file mode 100644 index 000000000000..5dd354c6f18c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import 
java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * This contributor extends SimpleRisToMetadataContributor, + * in particular, this one is able to chain multi values into a single one + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class SimpleRisToMetadataConcatContributor extends SimpleRisToMetadataContributor { + + private String tag; + + private MetadataFieldConfig metadata; + + @Override + public Collection contributeMetadata(Map> record) { + List values = new LinkedList<>(); + List fieldValues = record.get(this.tag); + Optional.ofNullable(fieldValues) + .map(fv -> fv.stream()) + .map(s -> s.collect(Collectors.joining(" "))) + .ifPresent(t -> values.add(this.metadataFieldMapping.toDCValue(this.metadata, t))); + return values; + } + + public String getTag() { + return tag; + } + + public void setTag(String tag) { + this.tag = tag; + } + + public MetadataFieldConfig getMetadata() { + return metadata; + } + + public void setMetadata(MetadataFieldConfig metadata) { + this.metadata = metadata; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java new file mode 100644 index 000000000000..36ea0dd47839 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * Metadata contributor that takes a record defined as Map> + * and turns it into metadatums configured in fieldToMetadata + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class SimpleRisToMetadataContributor implements MetadataContributor>> { + + protected Map fieldToMetadata; + + protected MetadataFieldMapping>, + MetadataContributor>>> metadataFieldMapping; + + public SimpleRisToMetadataContributor() {} + + public SimpleRisToMetadataContributor(Map fieldToMetadata) { + this.fieldToMetadata = fieldToMetadata; + } + + @Override + public Collection contributeMetadata(Map> record) { + List values = new LinkedList<>(); + for (String field : fieldToMetadata.keySet()) { + List fieldValues = record.get(field); + if (Objects.nonNull(fieldValues)) { + for (String value : fieldValues) { + values.add(metadataFieldMapping.toDCValue(fieldToMetadata.get(field), value)); + } + } + } + return values; + } + + public Map getFieldToMetadata() { + return fieldToMetadata; + } + + public void setFieldToMetadata(Map fieldToMetadata) { + this.fieldToMetadata = fieldToMetadata; + } + + public MetadataFieldMapping>, + MetadataContributor>>> getMetadataFieldMapping() { + return metadataFieldMapping; + } + + public void setMetadataFieldMapping(MetadataFieldMapping>, + MetadataContributor>>> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java new file mode 100644 index 000000000000..fb15cd60ab00 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java @@ -0,0 +1,91 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor can be used when parsing an XML file, + * particularly to extract a date and convert it to a specific format. + * In the variable dateFormatFrom the read format should be configured, + * instead in the variable dateFormatTo the format you want to obtain. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor { + + private DateFormat dateFormatFrom; + private DateFormat dateFormatTo; + + public void setDateFormatFrom(String dateFormatFrom) { + this.dateFormatFrom = new SimpleDateFormat(dateFormatFrom); + } + + public void setDateFormatTo(String dateFormatTo) { + this.dateFormatTo = new SimpleDateFormat(dateFormatTo); + } + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance() + .compile(query,Filters.fpassthrough(), null, namespaces); + List nodes = xpath.evaluate(element); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(getMetadatum(field, ((Element) el).getText())); + } else if (el instanceof Attribute) { + values.add(getMetadatum(field, ((Attribute) el).getValue())); + } else if (el instanceof String) { + values.add(getMetadatum(field, (String) el)); + } else if (el instanceof Text) { + values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText())); + } else { + System.err.println("node of type: " + el.getClass()); + } + } + return values; + } + + private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) { + MetadatumDTO dcValue = new MetadatumDTO(); + if (field == null) { + return null; + } + try { + dcValue.setValue(dateFormatTo.format(dateFormatFrom.parse(value))); + } catch (ParseException e) { + dcValue.setValue(value); + } + dcValue.setElement(field.getElement()); + dcValue.setQualifier(field.getQualifier()); + dcValue.setSchema(field.getSchema()); + return dcValue; + } + +} \ No newline at end of file diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java new file mode 100644 index 000000000000..edaad8a2499a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumAndAttributeContributor.java @@ -0,0 +1,69 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor checks for each node returned for the supplied path + * if node contains supplied attribute - the value of the current node is taken if exist. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot com) + */ +public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + private String attribute; + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = (Element) el; + String attributeValue = element.getAttributeValue(this.attribute); + if (StringUtils.isNotBlank(attributeValue)) { + values.add(metadataFieldMapping.toDCValue(this.field, attributeValue)); + } + } else { + log.warn("node of type: " + el.getClass()); + } + } + return values; + } + + public String getAttribute() { + return attribute; + } + + public void setAttribute(String attribute) { + this.attribute = attribute; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java index 87cdbfa6ed04..05f8647d7867 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java @@ -7,33 +7,36 @@ */ package org.dspace.importer.external.metadatamapping.contributor; +import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import 
java.util.Map; import javax.annotation.Resource; -import org.apache.axiom.om.OMAttribute; -import org.apache.axiom.om.OMElement; -import org.apache.axiom.om.OMText; -import org.apache.axiom.om.xpath.AXIOMXPath; +import org.apache.logging.log4j.Logger; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; -import org.jaxen.JaxenException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; import org.springframework.beans.factory.annotation.Autowired; /** - * Metadata contributor that takes an axiom OMElement and turns it into a metadatum + * Metadata contributor that takes a JDOM Element and turns it into a metadatum * * @author Roeland Dillen (roeland at atmire dot com) */ -public class SimpleXpathMetadatumContributor implements MetadataContributor { - private MetadataFieldConfig field; +public class SimpleXpathMetadatumContributor implements MetadataContributor { - private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class); + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); + + protected MetadataFieldConfig field; /** * Return prefixToNamespaceMapping @@ -44,14 +47,14 @@ public Map getPrefixToNamespaceMapping() { return prefixToNamespaceMapping; } - private MetadataFieldMapping> metadataFieldMapping; + protected MetadataFieldMapping> metadataFieldMapping; /** * Return metadataFieldMapping * * @return MetadataFieldMapping */ - public MetadataFieldMapping> getMetadataFieldMapping() { + public MetadataFieldMapping> getMetadataFieldMapping() { return metadataFieldMapping; } @@ -62,7 +65,7 @@ public 
MetadataFieldMapping> getMetada */ @Override public void setMetadataFieldMapping( - MetadataFieldMapping> metadataFieldMapping) { + MetadataFieldMapping> metadataFieldMapping) { this.metadataFieldMapping = metadataFieldMapping; } @@ -76,7 +79,7 @@ public void setPrefixToNamespaceMapping(Map prefixToNamespaceMap this.prefixToNamespaceMapping = prefixToNamespaceMapping; } - private Map prefixToNamespaceMapping; + protected Map prefixToNamespaceMapping; /** * Initialize SimpleXpathMetadatumContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig @@ -100,7 +103,7 @@ public SimpleXpathMetadatumContributor() { } - private String query; + protected String query; /** * Return the MetadataFieldConfig used while retrieving MetadatumDTO @@ -140,36 +143,33 @@ public void setQuery(String query) { * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO * list * - * @param t A class to retrieve metadata from. + * @param t An element to retrieve metadata from. * @return a collection of import records. Only the identifier of the found records may be put in the record. 
*/ @Override - public Collection contributeMetadata(OMElement t) { + public Collection contributeMetadata(Element t) { List values = new LinkedList<>(); - try { - AXIOMXPath xpath = new AXIOMXPath(query); - for (String ns : prefixToNamespaceMapping.keySet()) { - xpath.addNamespace(prefixToNamespaceMapping.get(ns), ns); - } - List nodes = xpath.selectNodes(t); - for (Object el : nodes) { - if (el instanceof OMElement) { - values.add(metadataFieldMapping.toDCValue(field, ((OMElement) el).getText())); - } else if (el instanceof OMAttribute) { - values.add(metadataFieldMapping.toDCValue(field, ((OMAttribute) el).getAttributeValue())); - } else if (el instanceof String) { - values.add(metadataFieldMapping.toDCValue(field, (String) el)); - } else if (el instanceof OMText) { - values.add(metadataFieldMapping.toDCValue(field, ((OMText) el).getText())); - } else { - log.error("node of type: " + el.getClass()); - } + + List namespaces = new ArrayList<>(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText())); + } else if (el instanceof Attribute) { + values.add(metadataFieldMapping.toDCValue(field, ((Attribute) el).getValue())); + } else if (el instanceof String) { + values.add(metadataFieldMapping.toDCValue(field, (String) el)); + } else if (el instanceof Text) { + values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText())); + } else { + log.error("Encountered unsupported XML node of type: {}. 
Skipped that node.", el.getClass()); } - return values; - } catch (JaxenException e) { - log.error(query, e); - throw new RuntimeException(e); } - + return values; } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java new file mode 100644 index 000000000000..c04081957f19 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SplitMetadataContributor.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; + +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * Wrapper class used to split another MetadataContributor's output into distinct values. + * The split is performed by matching a regular expression against the wrapped MetadataContributor's output. 
+ * + * @author Philipp Rumpf (philipp.rumpf@uni-bamberg.de) + */ + +public class SplitMetadataContributor implements MetadataContributor { + private final MetadataContributor innerContributor; + private final String regex; + + /** + * @param innerContributor The MetadataContributor whose output is split + * @param regex A regular expression matching the separator between different values + */ + public SplitMetadataContributor(MetadataContributor innerContributor, String regex) { + this.innerContributor = innerContributor; + this.regex = regex; + } + + @Override + public void setMetadataFieldMapping(MetadataFieldMapping> rt) { + + } + + /** + * Each metadatum returned by the wrapped MetadataContributor is split into one or more metadata values + * based on the provided regular expression. + * + * @param t The recordType object to retrieve metadata from + * @return The collection of processed metadata values + */ + @Override + public Collection contributeMetadata(T t) { + Collection metadata = innerContributor.contributeMetadata(t); + ArrayList splitMetadata = new ArrayList<>(); + for (MetadatumDTO metadatumDTO : metadata) { + String[] split = metadatumDTO.getValue().split(regex); + for (String splitItem : split) { + MetadatumDTO splitMetadatumDTO = new MetadatumDTO(); + splitMetadatumDTO.setSchema(metadatumDTO.getSchema()); + splitMetadatumDTO.setElement(metadatumDTO.getElement()); + splitMetadatumDTO.setQualifier(metadatumDTO.getQualifier()); + splitMetadatumDTO.setValue(splitItem); + splitMetadata.add(splitMetadatumDTO); + } + } + return splitMetadata; + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosAttribute2ValueContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosAttribute2ValueContributor.java new file mode 100644 index 000000000000..66e16f7ae866 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosAttribute2ValueContributor.java @@ -0,0 +1,160 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import javax.annotation.Resource; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Web Of Science specific implementation of {@link MetadataContributor} + * This contributor checks for each node returned for the given path if the node contains "this.attribute" + * and then checks if the attribute value is one of the values configured + * in the "this.attributeValue2metadata" map, if the value of the current known is taken. + * If "this.firstChild" is true, it takes the value of the child of the known. + * The mapping and configuration of this class can be found in the following wos-integration.xml file. 
+ * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WosAttribute2ValueContributor implements MetadataContributor { + + private final static Logger log = LogManager.getLogger(); + + private String query; + + private String attribute; + + private boolean firstChild; + + private String childName; + + private Map prefixToNamespaceMapping; + + private Map attributeValue2metadata; + + private MetadataFieldMapping> metadataFieldMapping; + + public WosAttribute2ValueContributor() {} + + public WosAttribute2ValueContributor(String query, + Map prefixToNamespaceMapping, + Map attributeValue2metadata) { + this.query = query; + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + this.attributeValue2metadata = attributeValue2metadata; + } + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = (Element) el; + String attributeValue = element.getAttributeValue(this.attribute); + setField(attributeValue, element, values); + } else { + log.warn("node of type: " + el.getClass()); + } + } + return values; + } + + private void setField(String attributeValue, Element el, List values) { + for (String id : attributeValue2metadata.keySet()) { + if (StringUtils.equals(id, attributeValue)) { + if (this.firstChild) { + String value = el.getChild(this.childName).getValue(); + values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), value)); + } else { + values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), el.getText())); + } + } + } + } + + public MetadataFieldMapping> 
getMetadataFieldMapping() { + return metadataFieldMapping; + } + + public void setMetadataFieldMapping( + MetadataFieldMapping> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + + @Resource(name = "isiFullprefixMapping") + public void setPrefixToNamespaceMapping(Map prefixToNamespaceMapping) { + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + } + + public Map getPrefixToNamespaceMapping() { + return prefixToNamespaceMapping; + } + + public String getAttribute() { + return attribute; + } + + public void setAttribute(String attribute) { + this.attribute = attribute; + } + + public Map getAttributeValue2metadata() { + return attributeValue2metadata; + } + + public void setAttributeValue2metadata(Map attributeValue2metadata) { + this.attributeValue2metadata = attributeValue2metadata; + } + + public String getQuery() { + return query; + } + + public void setQuery(String query) { + this.query = query; + } + + public boolean isFirstChild() { + return firstChild; + } + + public void setFirstChild(boolean firstChild) { + this.firstChild = firstChild; + } + + public String getChildName() { + return childName; + } + + public void setChildName(String childName) { + this.childName = childName; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierContributor.java new file mode 100644 index 000000000000..cf434c326e6b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierContributor.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor can retrieve the identifiers + * configured in "this.identifire2field" from the Web of Science response. + * The mapping and configuration of this class can be found in the following wos-integration.xml file. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WosIdentifierContributor extends SimpleXpathMetadatumContributor { + + protected Map identifier2field; + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List namespaces = new ArrayList<>(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = + XPathFactory.instance().compile(query, Filters.element(), null, namespaces); + + List nodes = xpath.evaluate(element); + for (Element el : nodes) { + String type = el.getAttributeValue("type"); + setIdentyfier(type, el, values); + } + return values; + } + + private void setIdentyfier(String type, Element el, List values) { + for (String id : identifier2field.keySet()) { + if (StringUtils.equals(id, type)) { + String value = el.getAttributeValue("value"); + values.add(metadataFieldMapping.toDCValue(identifier2field.get(id), value)); + } + } + } + + public Map getIdentifier2field() { + return identifier2field; + } + + public void setIdentifier2field(Map identifier2field) { + 
this.identifier2field = identifier2field; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierRidContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierRidContributor.java new file mode 100644 index 000000000000..768ef50e65ed --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/WosIdentifierRidContributor.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * Web Of Science specific implementation of {@link MetadataContributor} + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WosIdentifierRidContributor extends SimpleXpathMetadatumContributor { + + private final static Logger log = LogManager.getLogger(); + + @Override + public Collection contributeMetadata(Element t) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance().compile(query, 
Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(t); + for (Object el : nodes) { + if (el instanceof Element) { + Element element = ((Element) el).getChild("name"); + if (Objects.nonNull(element)) { + String type = element.getAttributeValue("role"); + setIdentyfier(type, element, values); + } + } else { + log.warn("node of type: " + el.getClass()); + } + } + return values; + } + + private void setIdentyfier(String type, Element el, List values) { + if (StringUtils.equals("researcher_id", type)) { + String value = el.getAttributeValue("r_id"); + if (StringUtils.isNotBlank(value)) { + values.add(metadataFieldMapping.toDCValue(this.field, value)); + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index ba2316755300..add9caef1b74 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -15,8 +15,8 @@ import java.util.LinkedList; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.DCDate; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -107,26 +107,30 @@ public Collection contributeMetadata(T t) { LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if 
(monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM"); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy"); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size() && dcDate == null) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); + resultDateString = resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the entire stacktrace every // time one of those formats fails. 
@@ -136,8 +140,8 @@ public Collection contributeMetadata(T t) { } j++; } - if (dcDate != null) { - values.add(metadataFieldMapping.toDCValue(field, dcDate.toString())); + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); } else { log.info( "Failed parsing " + dateString + ", check " + diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java index 4802dcfa1787..a6cfa625bbcf 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/service/PubmedImportMetadataSourceServiceImpl.java @@ -14,31 +14,34 @@ import java.io.Reader; import java.io.StringReader; import java.util.Collection; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.concurrent.Callable; -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; import com.google.common.io.CharStreams; -import org.apache.axiom.om.OMElement; -import org.apache.axiom.om.OMXMLBuilderFactory; -import org.apache.axiom.om.OMXMLParserWrapper; -import org.apache.axiom.om.xpath.AXIOMXPath; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; import org.dspace.content.Item; import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.exception.FileMultipleOccurencesException; import org.dspace.importer.external.exception.FileSourceException; import 
org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.QuerySource; -import org.jaxen.JaxenException; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; /** * Implements a data source for querying PubMed Central @@ -46,20 +49,23 @@ * @author Roeland Dillen (roeland at atmire dot com) * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) */ -public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService +public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService implements QuerySource, FileSource { - private String baseAddress; + private String urlFetch; + private String urlSearch; - // it is protected so that subclass can mock it for testing - protected WebTarget pubmedWebTarget; + private int attempt = 3; private List supportedExtensions; + @Autowired + private LiveImportClient liveImportClient; + /** * Set the file extensions supported by this metadata service * - * @param supportedExtensionsthe file extensions (xml,txt,...) supported by this service + * @param supportedExtensions the file extensions (xml,txt,...) 
supported by this service */ public void setSupportedExtensions(List supportedExtensions) { this.supportedExtensions = supportedExtensions; @@ -185,29 +191,7 @@ public Collection findMatchingRecords(Query query) throws Metadata * @throws Exception on generic exception */ @Override - public void init() throws Exception { - Client client = ClientBuilder.newClient(); - WebTarget webTarget = client.target(baseAddress); - pubmedWebTarget = webTarget.queryParam("db", "pubmed"); - } - - /** - * Return the baseAddress set to this object - * - * @return The String object that represents the baseAddress of this object - */ - public String getBaseAddress() { - return baseAddress; - } - - /** - * Set the baseAddress to this object - * - * @param baseAddress The String object that represents the baseAddress of this object - */ - public void setBaseAddress(String baseAddress) { - this.baseAddress = baseAddress; - } + public void init() throws Exception {} private class GetNbRecords implements Callable { @@ -224,36 +208,43 @@ public GetNbRecords(Query query) { @Override public Integer call() throws Exception { - WebTarget getRecordIdsTarget = pubmedWebTarget - .queryParam("term", query.getParameterAsClass("query", String.class)); - - getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); - - Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); - - Response response = invocationBuilder.get(); - - String responseString = response.readEntity(String.class); + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } - String count = 
getSingleElementValue(responseString, "Count"); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." + + " The request was made with this URL:" + uriBuilder.toString()); + } - return Integer.parseInt(count); + return Integer.parseInt(getSingleElementValue(response, "Count")); } } - private String getSingleElementValue(String src, String elementName) { - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(src)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; String value = null; + try { - xpath = new AXIOMXPath("//" + elementName); - List recordsList = xpath.selectNodes(element); - if (!recordsList.isEmpty()) { - value = recordsList.get(0).getText(); + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(src)); + Element root = document.getRootElement(); + + XPathExpression xpath = + XPathFactory.instance().compile("//" + elementName, Filters.element()); + + Element record = xpath.evaluateFirst(root); + if (record != null) { + value = record.getText(); } - } catch (JaxenException e) { + } catch (JDOMException | IOException e) { value = null; } return value; @@ -280,43 +271,76 @@ public Collection call() throws Exception { Integer start = query.getParameterAsClass("start", Integer.class); Integer count = query.getParameterAsClass("count", Integer.class); - if (count == null || count < 0) { + if (Objects.isNull(count) || count < 0) { count = 10; } - if (start == null || start < 0) { + if (Objects.isNull(start) || start < 0) { start = 0; } List records = new LinkedList(); - WebTarget getRecordIdsTarget = pubmedWebTarget.queryParam("term", queryString); - getRecordIdsTarget = getRecordIdsTarget.queryParam("retstart", start); - getRecordIdsTarget = getRecordIdsTarget.queryParam("retmax", count); - getRecordIdsTarget = 
getRecordIdsTarget.queryParam("usehistory", "y"); - getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); - - Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); - - Response response = invocationBuilder.get(); - String responseString = response.readEntity(String.class); + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("retstart", start.toString()); + uriBuilder.addParameter("retmax", count.toString()); + uriBuilder.addParameter("usehistory", "y"); + uriBuilder.addParameter("term", queryString); + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); + } - String queryKey = getSingleElementValue(responseString, "QueryKey"); - String webEnv = getSingleElementValue(responseString, "WebEnv"); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder.toString()); + } - WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); - getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); - getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); - getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); - getRecordsTarget = getRecordsTarget.queryParam("retmax", count); - getRecordsTarget = getRecordsTarget.queryParam("retstart", start); + String queryKey = getSingleElementValue(response, "QueryKey"); + String webEnv = getSingleElementValue(response, "WebEnv"); + + URIBuilder uriBuilder2 = new URIBuilder(urlFetch); + uriBuilder2.addParameter("db", "pubmed"); + uriBuilder2.addParameter("retstart", start.toString()); + uriBuilder2.addParameter("retmax", count.toString()); + uriBuilder2.addParameter("WebEnv", webEnv); + uriBuilder2.addParameter("query_key", queryKey); + uriBuilder2.addParameter("retmode", "xml"); + Map> params2 = new HashMap>(); + String response2 = StringUtils.EMPTY; + countAttempt = 0; + while (StringUtils.isBlank(response2) && countAttempt <= attempt) { + countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + + lastRequest = System.currentTimeMillis(); + } - invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); - response = invocationBuilder.get(); + if (StringUtils.isBlank(response2)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder2.toString()); + } - List omElements = splitToRecords(response.readEntity(String.class)); + List elements = splitToRecords(response2); - for (OMElement record : omElements) { + for (Element record : elements) { records.add(transformSourceRecords(record)); } @@ -324,15 +348,23 @@ public Collection call() throws Exception { } } - private List splitToRecords(String recordsSrc) { - OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc)); - OMElement element = records.getDocumentElement(); - AXIOMXPath xpath = null; + private List splitToRecords(String recordsSrc) { try { - xpath = new AXIOMXPath("//PubmedArticle"); - List recordsList = xpath.selectNodes(element); + SAXBuilder saxBuilder = new SAXBuilder(); + // Disallow external entities & entity expansion to protect against XXE attacks + // (NOTE: We receive errors if we disable all DTDs for PubMed, so this is the best we can do) + saxBuilder.setFeature("http://xml.org/sax/features/external-general-entities", false); + saxBuilder.setFeature("http://xml.org/sax/features/external-parameter-entities", false); + saxBuilder.setExpandEntities(false); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + + XPathExpression xpath = + XPathFactory.instance().compile("//PubmedArticle", Filters.element()); + + List recordsList = xpath.evaluate(root); return recordsList; - } catch (JaxenException e) { + } catch (JDOMException | IOException e) { return null; } } @@ -352,23 +384,29 @@ public GetRecord(Query q) { @Override public ImportRecord call() throws Exception { - String id = query.getParameterAsClass("id", String.class); - - WebTarget getRecordTarget = pubmedWebTarget.queryParam("id", id); - getRecordTarget = getRecordTarget.queryParam("retmode", "xml"); - getRecordTarget = getRecordTarget.path("efetch.fcgi"); - Invocation.Builder invocationBuilder = 
getRecordTarget.request(MediaType.TEXT_PLAIN_TYPE); - - Response response = invocationBuilder.get(); - - List omElements = splitToRecords(response.readEntity(String.class)); + URIBuilder uriBuilder = new URIBuilder(urlFetch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("retmode", "xml"); + uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class)); + + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } - if (omElements.size() == 0) { - return null; + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." + + " The request was made with this URL:" + uriBuilder.toString()); } - return transformSourceRecords(omElements.get(0)); + List elements = splitToRecords(response); + + return elements.isEmpty() ? 
null : transformSourceRecords(elements.get(0)); } } @@ -387,40 +425,68 @@ public FindMatchingRecords(Query q) { @Override public Collection call() throws Exception { - WebTarget getRecordIdsTarget = pubmedWebTarget - .queryParam("term", query.getParameterAsClass("term", String.class)); - getRecordIdsTarget = getRecordIdsTarget - .queryParam("field", query.getParameterAsClass("field", String.class)); - getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); - getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); - - Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); - - Response response = invocationBuilder.get(); - String responseString = response.readEntity(String.class); + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("db", "pubmed"); + uriBuilder.addParameter("usehistory", "y"); + uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class)); + uriBuilder.addParameter("field", query.getParameterAsClass("field", String.class)); + + Map> params = new HashMap>(); + String response = StringUtils.EMPTY; + int countAttempt = 0; + while (StringUtils.isBlank(response) && countAttempt <= attempt) { + countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + + response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + lastRequest = System.currentTimeMillis(); + } - String queryKey = getSingleElementValue(responseString, "QueryKey"); - String webEnv = getSingleElementValue(responseString, "WebEnv"); + if (StringUtils.isBlank(response)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder.toString()); + } - WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); - getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); - getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); - getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); + String webEnv = getSingleElementValue(response, "WebEnv"); + String queryKey = getSingleElementValue(response, "QueryKey"); + + URIBuilder uriBuilder2 = new URIBuilder(urlFetch); + uriBuilder2.addParameter("db", "pubmed"); + uriBuilder2.addParameter("retmode", "xml"); + uriBuilder2.addParameter("WebEnv", webEnv); + uriBuilder2.addParameter("query_key", queryKey); + + Map> params2 = new HashMap>(); + String response2 = StringUtils.EMPTY; + countAttempt = 0; + while (StringUtils.isBlank(response2) && countAttempt <= attempt) { + countAttempt++; + long time = System.currentTimeMillis() - lastRequest; + if ((time) < interRequestTime) { + Thread.sleep(interRequestTime - time); + } + response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2); + lastRequest = System.currentTimeMillis(); + } - invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); - response = invocationBuilder.get(); + if (StringUtils.isBlank(response2)) { + throw new RuntimeException("After " + attempt + + " attempts to contact the PubMed service, a correct answer could not be received." 
+ + " The request was made with this URL:" + uriBuilder2.toString()); + } - String xml = response.readEntity(String.class); - return parseXMLString(xml); + return parseXMLString(response2); } } - @Override public List getRecords(InputStream inputStream) throws FileSourceException { - String xml = null; try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) { - xml = CharStreams.toString(reader); + String xml = CharStreams.toString(reader); return parseXMLString(xml); } catch (IOException e) { throw new FileSourceException ("Cannot read XML from InputStream", e); @@ -441,10 +507,27 @@ public ImportRecord getRecord(InputStream inputStream) throws FileSourceExceptio private List parseXMLString(String xml) { List records = new LinkedList(); - List omElements = splitToRecords(xml); - for (OMElement record : omElements) { + List elements = splitToRecords(xml); + for (Element record : elements) { records.add(transformSourceRecords(record)); } return records; } + + public String getUrlFetch() { + return urlFetch; + } + + public void setUrlFetch(String urlFetch) { + this.urlFetch = urlFetch; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeFieldMapping.java new file mode 100644 index 000000000000..8c8e23fe989a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.pubmedeurope; + +import java.util.Map; +import javax.annotation.Resource; + 
+import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the PubmedEurope metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class PubmedEuropeFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "pubmedEuropeMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..92d7d9fbd3fe --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmedeurope/PubmedEuropeMetadataSourceServiceImpl.java @@ -0,0 +1,423 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.pubmedeurope; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; 
+import javax.el.MethodNotFoundException; + +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.jaxen.JaxenException; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.CollectionUtils; + +/** + * Implements a data source for querying PubMed Europe + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class PubmedEuropeMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "pubmedeu"; + } + + /** + * Get a single record from the PubMed Europe. + * + * @param id Identifier for the record + * @return The first matching record + * @throws MetadataSourceException If the underlying methods throw any exception. 
+ */ + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + /** + * Find the number of records matching a query; + * + * @param query a query string to base the search on. + * @return the sum of the matching records over this import source + * @throws MetadataSourceException if the underlying methods throw any exception. + */ + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + /** + * Find the number of records matching a query; + * + * @param query A query string to base the search on. + * @return The sum of the matching records over this import source + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + /** + * Find records matching a string query. + * + * @param query A query string to base the search on. + * @param start Offset to start at + * @param count Number of records to retrieve. + * @return A set of records. Fully transformed. + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + /** + * Find records based on a object query. + * + * @param query A query object to base the search on. + * @return A set of records. Fully transformed. + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + /** + * Get a single record from the PubMed Europe. 
+ * + * @param query A query matching a single record + * @return The first matching record + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + /** + * Finds records based on query object. + * + * @param query A query object to base the search on. + * @return A collection of import records. + * @throws MetadataSourceException If the underlying methods throw any exception. + */ + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for PubMed Europe"); + } + + @Override + public void init() throws Exception {} + + public List getByPubmedEuropeID(String pubmedID, Integer start, Integer size) + throws IOException, HttpException { + String query = "(EXT_ID:" + pubmedID + ")"; + return search(query, size < 1 ? 1 : size, start); + } + + /** + * This class is a Callable implementation to get PubMed Europe entries based on + * query object. + * + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String queryString = query.getParameterAsClass("query", String.class); + return search(queryString, count, start); + + } + } + + /** + * This class is a Callable implementation to get an PubMed Europe entry using PubMed Europe ID + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + return getByPubmedEuropeID(query.getParameterAsClass("id", String.class), 1 ,0); + } + } + + /** + * This class is a Callable implementation to search PubMed Europe entries + * using author, title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ *
+ * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
+ */
+ private class FindMatchingRecordCallable implements Callable> {
+
+ private Query query;
+
+ private FindMatchingRecordCallable(Query q) {
+ query = q;
+ }
+
+ @Override
+ public List call() throws Exception {
+ String title = query.getParameterAsClass("title", String.class);
+ String author = query.getParameterAsClass("author", String.class);
+ Integer year = query.getParameterAsClass("year", Integer.class);
+ Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
+ Integer start = query.getParameterAsClass("start", Integer.class);
+ return search(title, author, year, maxResult, start);
+ }
+
+ }
+
+ /**
+ * This class is a Callable implementation to count the number
+ * of entries for an PubMed Europe query.
+ *
+ * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
+ */
+ private class CountByQueryCallable implements Callable {
+ private Query query;
+
+
+ private CountByQueryCallable(String queryString) {
+ query = new Query();
+ query.addParameter("query", queryString);
+ }
+
+ private CountByQueryCallable(Query query) {
+ this.query = query;
+ }
+
+ @Override
+ public Integer call() throws Exception {
+ try {
+ return count(query.getParameterAsClass("query", String.class));
+ } catch (Exception e) {
+ throw new RuntimeException(e.getMessage(), e);
+ }
+ }
+ }
+
+ /**
+ * Returns the total number of PubMed Europe publications returned by a specific query
+ *
+ * @param query A keyword or combination of keywords to be searched
+ * @throws URISyntaxException If URI syntax error
+ * @throws ClientProtocolException The client protocol exception
+ * @throws IOException If IO error
+ * @throws JaxenException If Xpath evaluation failed
+ */
+ public Integer count(String query) throws URISyntaxException, ClientProtocolException, IOException, JaxenException {
+ try {
+ Map> params = new HashMap>();
+ String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query),
params); + + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + Element element = root.getChild("hitCount"); + return Integer.parseInt(element.getValue()); + } catch (JDOMException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + } + + public List search(String title, String author, int year, int count, int start) + throws IOException { + StringBuffer query = new StringBuffer(); + query.append("("); + if (StringUtils.isNotBlank(title)) { + query.append("TITLE:").append(title); + query.append(")"); + } + if (StringUtils.isNotBlank(author)) { + // Search for a surname and (optionally) initial(s) in publication author lists + // AUTH:einstein, AUTH:”Smith AB” + String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)"; + String[] authors = author.split(splitRegex); + if (query.length() > 0) { + query.append(" AND "); + } + query.append("("); + int countAuthors = 0; + for (String auth : authors) { + countAuthors++; + query.append("AUTH:\"").append(auth).append("\""); + if (countAuthors < authors.length) { + query.append(" AND "); + } + } + query.append(")"); + } + if (year != -1) { + if (query.length() > 0) { + query.append(" AND "); + } + query.append("( PUB_YEAR:").append(year).append(")"); + } + query.append(")"); + return search(query.toString(), count, start); + } + + /** + * Returns a list of PubMed Europe publication records + * + * @param query A keyword or combination of keywords to be searched + * @param size The number of search results per page + * @param start Start number for the acquired search result list + * @throws IOException If IO error + */ + public List search(String query, Integer size, Integer start) throws IOException { + List results = new 
ArrayList<>(); + try { + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("format", "xml"); + uriBuilder.addParameter("resulttype", "core"); + uriBuilder.addParameter("pageSize", String.valueOf(size)); + uriBuilder.addParameter("query", query); + Map> params = new HashMap>(); + boolean lastPage = false; + int skipped = 0; + while (!lastPage || results.size() < size) { + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + String cursorMark = StringUtils.EMPTY; + if (StringUtils.isNotBlank(response)) { + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + XPathFactory xpfac = XPathFactory.instance(); + XPathExpression xPath = xpfac.compile("//responseWrapper/resultList/result", + Filters.element()); + List records = xPath.evaluate(document); + if (records.size() > 0) { + for (Element item : records) { + if (start > skipped) { + skipped++; + } else { + results.add(transformSourceRecords(item)); + } + } + } else { + lastPage = true; + break; + } + Element root = document.getRootElement(); + Element nextCursorMark = root.getChild("nextCursorMark"); + cursorMark = Objects.nonNull(nextCursorMark) ? 
nextCursorMark.getValue() : StringUtils.EMPTY;
+ }
+ if (StringUtils.isNotBlank(cursorMark)) {
+ uriBuilder.setParameter("cursorMark", cursorMark);
+ } else {
+ lastPage = true;
+ }
+ }
+ } catch (URISyntaxException | JDOMException e) {
+ log.error(e.getMessage(), e);
+ throw new RuntimeException(e.getMessage(), e);
+ }
+ return results;
+ }
+
+ private String buildURI(Integer pageSize, String query) throws URISyntaxException {
+ URIBuilder uriBuilder = new URIBuilder(this.url);
+ uriBuilder.addParameter("format", "xml");
+ uriBuilder.addParameter("resulttype", "core");
+ uriBuilder.addParameter("pageSize", String.valueOf(pageSize));
+ uriBuilder.addParameter("query", query);
+ return uriBuilder.toString();
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+}
\ No newline at end of file
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java
index 2574e187dfc6..1f460c19e697 100644
--- a/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/importer/external/ris/service/RisImportMetadataSourceServiceImpl.java
@@ -126,10 +126,10 @@ private List notAggregatedData(InputStream inputStrea
 }
 
 /**
- * Retrieve the MetadataFieldMapping containing the mapping between RecordType
+ * Set the MetadataFieldMapping containing the mapping between RecordType
 * (in this case PlainMetadataSourceDto.class) and Metadata
 *
- * @return The configured MetadataFieldMapping
+ * @param metadataFieldMap The configured MetadataFieldMapping
 */
 @Override
 @SuppressWarnings("unchecked")
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java
b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java new file mode 100644 index 000000000000..0d7183a1f058 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scielo.service; +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Scielo metadatum fields on the DSpace metadatum fields + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class ScieloFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and + * metadata that will be set to the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "scieloMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..4f83ffe978f7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java @@ -0,0 +1,263 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scielo.service; + +import java.io.BufferedReader; +import java.io.StringReader; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; +import javax.ws.rs.BadRequestException; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.http.client.utils.URIBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; 
+import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Scielo + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService>> + implements QuerySource { + + /** + * This pattern is used when reading the Scielo response, + * to check if the fields you are reading is in rid format + */ + private static final String PATTERN = "^([A-Z][A-Z0-9]) - (.*)$"; + + /** + * This pattern is used to verify correct format of ScieloId + */ + private static final String ID_PATTERN = "^(.....)-(.*)-(...)$"; + + private int timeout = 1000; + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + @Override + public String getImportSource() { + return "scielo"; + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByQueryCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + /** + * This class is a Callable implementation to count the number of entries for an Scielo query + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(query, StandardCharsets.UTF_8)); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + return Objects.nonNull(records.size()) ? records.size() : 0; + } + } + + /** + * This class is a Callable implementation to get an Scielo entry using ScieloID + * The ScieloID to use can be passed through the constructor as a String + * or as Query's map entry, with the key "id". 
+ *
+ * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
+ */
+ private class FindByIdCallable implements Callable> {
+
+ private String id;
+
+ private FindByIdCallable(String id) {
+ this.id = id;
+ }
+
+ @Override
+ public List call() throws Exception {
+ List results = new ArrayList<>();
+ String scieloId = id.trim();
+ Pattern risPattern = Pattern.compile(ID_PATTERN);
+ Matcher risMatcher = risPattern.matcher(scieloId);
+ if (risMatcher.matches()) {
+ Map> params = new HashMap>();
+ URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(scieloId, StandardCharsets.UTF_8));
+ String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
+ Map>> records = getRecords(resp);
+ if (Objects.nonNull(records) && !records.isEmpty()) {
+ results.add(transformSourceRecords(records.get(1)));
+ }
+ } else {
+ throw new BadRequestException("id provided : " + scieloId + " is not an ScieloID");
+ }
+ return results;
+ }
+ }
+
+ /**
+ * This class is a Callable implementation to get Scielo entries based on query object.
+ * This Callable use as query value the string queryString passed to constructor.
+ * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
+ * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
+ *
+ * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
+ */
+ private class SearchByQueryCallable implements Callable> {
+
+ private Query query;
+
+ private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
+ query = new Query();
+ query.addParameter("query", queryString);
+ query.addParameter("start", start);
+ query.addParameter("count", maxResult);
+ }
+
+ private SearchByQueryCallable(Query query) {
+ this.query = query;
+ }
+
+ @Override
+ public List call() throws Exception {
+ List results = new ArrayList<>();
+ String q = query.getParameterAsClass("query", String.class);
+ Integer count = query.getParameterAsClass("count", Integer.class);
+ Integer start = query.getParameterAsClass("start", Integer.class);
+ URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(q, StandardCharsets.UTF_8));
+ uriBuilder.addParameter("start", start.toString());
+ uriBuilder.addParameter("count", count.toString());
+ Map> params = new HashMap>();
+ String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
+ Map>> records = getRecords(resp);
+ for (int record : records.keySet()) {
+ results.add(transformSourceRecords(records.get(record)));
+ }
+ return results;
+ }
+ }
+
+ private Map>> getRecords(String resp) throws FileSourceException {
+ Map>> records = new HashMap>>();
+ BufferedReader reader;
+ int countRecord = 0;
+ try {
+ reader = new BufferedReader(new StringReader(resp));
+ String line;
+ while ((line = reader.readLine()) != null) {
+ if (line.isEmpty() || line.equals("") || line.matches("^\\s*$")) {
+ continue;
+ }
+ line = line.replaceAll("\\uFEFF", "").trim();
+ Pattern risPattern = Pattern.compile(PATTERN);
+ Matcher risMatcher = risPattern.matcher(line);
+ if (risMatcher.matches()) {
+ if (risMatcher.group(1).equals("TY") && risMatcher.group(2).equals("JOUR")) {
+ countRecord ++;
+ Map> newMap = new HashMap>();
+ records.put(countRecord, newMap);
+ } else {
+ Map> tag2values =
records.get(countRecord); + List values = tag2values.get(risMatcher.group(1)); + if (Objects.isNull(values)) { + List newValues = new ArrayList(); + newValues.add(risMatcher.group(2)); + tag2values.put(risMatcher.group(1), newValues); + } else { + values.add(risMatcher.group(2)); + tag2values.put(risMatcher.group(1), values); + } + } + } + } + } catch (Exception e) { + throw new FileSourceException("Cannot parse RIS file", e); + } + return records; + } + + public void setUrl(String url) { + this.url = url; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusFieldMapping.java new file mode 100644 index 000000000000..c8143339b483 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusFieldMapping.java @@ -0,0 +1,38 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scopus.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Scopus metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class ScopusFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. 
+ * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "scopusMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..944d467e3156 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scopus/service/ScopusImportMetadataSourceServiceImpl.java @@ -0,0 +1,425 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scopus.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.URI_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import 
org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Scopus + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com) + */ +public class ScopusImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private int timeout = 1000; + + int itemPerPage = 25; + + private String url; + private String apiKey; + private String instKey; + private String viewMode; + + @Autowired + private LiveImportClient liveImportClient; + + public LiveImportClient getLiveImportClient() { + return liveImportClient; + } + + public void setLiveImportClient(LiveImportClient liveImportClient) { + this.liveImportClient = liveImportClient; + } + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. 
Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "scopus"; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + if (isEID(query)) { + return retry(new FindByIdCallable(query)).size(); + } + if (DoiCheck.isDoi(query)) { + query = DoiCheck.purgeDoiValue(query); + } + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + if (isEID(query.toString())) { + return retry(new FindByIdCallable(query.toString())).size(); + } + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, + int count) throws MetadataSourceException { + if (isEID(query)) { + return retry(new FindByIdCallable(query)); + } + if (DoiCheck.isDoi(query)) { + query = DoiCheck.purgeDoiValue(query); + } + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) + throws MetadataSourceException { + if (isEID(query.toString())) { + return retry(new FindByIdCallable(query.toString())); + } + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = null; + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + if (isEID(query.toString())) { + records = retry(new FindByIdCallable(query.toString())); + } else { + records = retry(new SearchByQueryCallable(query)); + } + return records == null || records.isEmpty() ? 
null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Item item) + throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scopus"); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return records == null || records.isEmpty() ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) + throws MetadataSourceException { + if (isEID(query.toString())) { + return retry(new FindByIdCallable(query.toString())); + } + if (DoiCheck.isDoi(query.toString())) { + query.addParameter("query", DoiCheck.purgeDoiValue(query.toString())); + } + return retry(new FindByQueryCallable(query)); + } + + private boolean isEID(String query) { + Pattern pattern = Pattern.compile("2-s2\\.0-\\d+"); + Matcher match = pattern.matcher(query); + if (match.matches()) { + return true; + } + return false; + } + + /** + * This class implements a callable to get the numbers of result + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + if (StringUtils.isNotBlank(apiKey)) { + // Execute the request. 
+ Map> params = new HashMap>(); + Map requestParams = getRequestParameters(query, null, null, null); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/")); + XPathExpression xpath = XPathFactory.instance() + .compile("opensearch:totalResults", Filters.element(), null, namespaces); + + Element count = xpath.evaluateFirst(root); + try { + return Integer.parseInt(count.getText()); + } catch (NumberFormatException e) { + return null; + } + } + return null; + } + } + + /** + * This class is a Callable implementation to get a Scopus entry using EID + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class FindByIdCallable implements Callable> { + + private String eid; + + private FindByIdCallable(String eid) { + this.eid = eid; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = "EID(" + eid.replace("!", "/") + ")"; + if (StringUtils.isNotBlank(apiKey)) { + Map> params = new HashMap>(); + Map requestParams = getRequestParameters(queryString, viewMode, null, null); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + /** + * This class implements a callable to get the items based on query parameters + */ + private class FindByQueryCallable implements 
Callable> {
+
+ private String title;
+ private String author;
+ private Integer year;
+ private Integer start;
+ private Integer count;
+
+ private FindByQueryCallable(Query query) {
+ this.title = query.getParameterAsClass("title", String.class);
+ this.year = query.getParameterAsClass("year", Integer.class);
+ this.author = query.getParameterAsClass("author", String.class);
+ this.start = query.getParameterAsClass("start", Integer.class) != null ?
+ query.getParameterAsClass("start", Integer.class) : 0;
+ this.count = query.getParameterAsClass("count", Integer.class) != null ?
+ query.getParameterAsClass("count", Integer.class) : 20;
+ }
+
+ @Override
+ public List call() throws Exception {
+ List results = new ArrayList<>();
+ String queryString = "";
+ StringBuffer query = new StringBuffer();
+ if (StringUtils.isNotBlank(title)) {
+ query.append("title(").append(title).append(")");
+ }
+ if (StringUtils.isNotBlank(author)) {
+ // [FAU]
+ if (query.length() > 0) {
+ query.append(" AND ");
+ }
+ query.append("AUTH(").append(author).append(")");
+ }
+ if (year != null && year != -1) {
+ // [DP]
+ if (query.length() > 0) {
+ query.append(" AND ");
+ }
+ query.append("PUBYEAR IS ").append(year);
+ }
+ queryString = query.toString();
+
+ if (apiKey != null && !apiKey.equals("")) {
+ Map> params = new HashMap>();
+ Map requestParams = getRequestParameters(queryString, viewMode, start, count);
+ params.put(URI_PARAMETERS, requestParams);
+ String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
+ List elements = splitToRecords(response);
+ for (Element record : elements) {
+ results.add(transformSourceRecords(record));
+ }
+ }
+ return results;
+ }
+ }
+
+ /**
+ * Find records matching a string query.
+ *
+ * @param query A query string to base the search on.
+ * @param start Offset to start at
+ * @param count Number of records to retrieve.
+ * @return A set of records. Fully transformed.
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + private Query query; + + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = query.getParameterAsClass("query", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + if (StringUtils.isNotBlank(apiKey)) { + Map> params = new HashMap>(); + Map requestParams = getRequestParameters(queryString, viewMode, start, count); + params.put(URI_PARAMETERS, requestParams); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + private Map getRequestParameters(String query, String viewMode, Integer start, Integer count) { + Map params = new HashMap(); + params.put("httpAccept", "application/xml"); + params.put("apiKey", apiKey); + params.put("query", query); + + if (StringUtils.isNotBlank(instKey)) { + params.put("insttoken", instKey); + } + if (StringUtils.isNotBlank(viewMode)) { + params.put("view", viewMode); + } + + params.put("start", (Objects.nonNull(start) ? start + "" : "0")); + params.put("count", (Objects.nonNull(count) ? 
count + "" : "20")); + return params; + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + List records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom")); + return records; + } catch (JDOMException | IOException e) { + return new ArrayList(); + } + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getViewMode() { + return viewMode; + } + + public void setViewMode(String viewMode) { + this.viewMode = viewMode; + } + + public String getApiKey() { + return apiKey; + } + + public String getInstKey() { + return instKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + + public void setInstKey(String instKey) { + this.instKey = instKey; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java b/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java new file mode 100644 index 000000000000..95d42e3a27da --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.service; + +import java.util.Arrays; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Utility class that provides methods to check if a given string is a DOI + * + * @author Corrado Lombardi (corrado.lombardi at 
4science.it) + */ +public class DoiCheck { + + private static final List DOI_PREFIXES = Arrays.asList("http://dx.doi.org/", "https://dx.doi.org/"); + + private static final Pattern PATTERN = Pattern.compile("10.\\d{4,9}/[-._;()/:A-Z0-9]+" + + "|10.1002/[^\\s]+" + + "|10.\\d{4}/\\d+-\\d+X?(\\d+)" + + "\\d+<[\\d\\w]+:[\\d\\w]*>\\d+.\\d+.\\w+;\\d" + + "|10.1021/\\w\\w\\d++" + + "|10.1207/[\\w\\d]+\\&\\d+_\\d+", + Pattern.CASE_INSENSITIVE); + + private DoiCheck() {} + + public static boolean isDoi(final String value) { + Matcher m = PATTERN.matcher(purgeDoiValue(value)); + return m.matches(); + } + + public static String purgeDoiValue(final String query) { + String value = query.replaceAll(",", ""); + for (final String prefix : DOI_PREFIXES) { + value = value.replaceAll(prefix, ""); + } + return value.trim(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java index 019cf33177c2..5d83b9a7cce4 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractPlainMetadataSource.java @@ -42,7 +42,7 @@ public abstract class AbstractPlainMetadataSource /** * Set the file extensions supported by this metadata service * - * @param supportedExtensionsthe file extensions (xml,txt,...) supported by this service + * @param supportedExtensions the file extensions (xml,txt,...) supported by this service */ public void setSupportedExtensions(List supportedExtensions) { this.supportedExtensions = supportedExtensions; @@ -57,7 +57,7 @@ public List getSupportedExtensions() { * Return a list of ImportRecord constructed from input file. 
This list is based on * the results retrieved from the file (InputStream) parsed through abstract method readData * - * @param InputStream The inputStream of the file + * @param is The inputStream of the file * @return A list of {@link ImportRecord} * @throws FileSourceException if, for any reason, the file is not parsable */ @@ -76,7 +76,7 @@ public List getRecords(InputStream is) throws FileSourceException * the result retrieved from the file (InputStream) parsed through abstract method * "readData" implementation * - * @param InputStream The inputStream of the file + * @param is The inputStream of the file * @return An {@link ImportRecord} matching the file content * @throws FileSourceException if, for any reason, the file is not parsable * @throws FileMultipleOccurencesException if the file contains more than one entry diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java index 38632a1a2b72..29801433e3b3 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/AbstractRemoteMetadataSource.java @@ -183,6 +183,7 @@ protected T retry(Callable callable) throws MetadataSourceException { log.warn("Error in trying operation " + operationId + " " + retry + " " + warning + ", retrying !", e); } finally { + this.lastRequest = System.currentTimeMillis(); lock.unlock(); } @@ -262,5 +263,7 @@ protected void throwSourceExceptionHook() { */ public abstract void init() throws Exception; - + public void setInterRequestTime(final long interRequestTime) { + this.interRequestTime = interRequestTime; + } } diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java 
b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java index 5bef0984df7f..13c81d15162b 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/components/FileSource.java @@ -30,7 +30,7 @@ public interface FileSource extends MetadataSource { /** * Return a list of ImportRecord constructed from input file. * - * @param InputStream The inputStream of the file + * @param inputStream The inputStream of the file * @return A list of {@link ImportRecord} * @throws FileSourceException if, for any reason, the file is not parsable */ @@ -40,7 +40,7 @@ public List getRecords(InputStream inputStream) /** * Return an ImportRecord constructed from input file. * - * @param InputStream The inputStream of the file + * @param inputStream The inputStream of the file * @return An {@link ImportRecord} matching the file content * @throws FileSourceException if, for any reason, the file is not parsable * @throws FileMultipleOccurencesException if the file contains more than one entry diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..a4f90fa5ba61 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java @@ -0,0 +1,339 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.vufind; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import 
java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying VuFind + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String urlSearch; + + private String fields; + + @Autowired + private LiveImportClient liveImportClient; + + public VuFindImportMetadataSourceServiceImpl(String fields) { + this.fields = fields; + } + + @Override + public String getImportSource() { + return "VuFind"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + String records = retry(new GetByVuFindIdCallable(id, fields)); + List importRecords = extractMetadataFromRecordList(records); + return importRecords != null && !importRecords.isEmpty() ? 
importRecords.get(0) : null; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, count, start, fields)); + return extractMetadataFromRecordList(records); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, fields)); + return extractMetadataFromRecordList(records); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, fields)); + List importRecords = extractMetadataFromRecordList(records); + return importRecords != null && !importRecords.isEmpty() ? importRecords.get(0) : null; + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String records = retry(new FindMatchingRecordsCallable(query)); + return extractMetadataFromRecordList(records); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for VuFind"); + } + + @Override + public void init() throws Exception {} + + /** + * This class is a Callable implementation to count the number of entries for an VuFind query. + * This Callable use as query value to CrossRef the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + + private Query query; + + public CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + public CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + Integer start = 0; + Integer count = 1; + int page = start / count + 1; + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("type", "AllField"); + uriBuilder.addParameter("page", String.valueOf(page)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", String.valueOf(true)); + uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode node = convertStringJsonToJsonNode(responseString); + JsonNode resultCountNode = node.get("resultCount"); + return resultCountNode.intValue(); + } + } + + /** + * This class is a Callable implementation to get an VuFind entry using VuFind id + * The id to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class GetByVuFindIdCallable implements Callable { + + private String id; + + private String fields; + + public GetByVuFindIdCallable(String id, String fields) { + this.id = id; + if (fields != null && fields.length() > 0) { + this.fields = fields; + } else { + this.fields = null; + } + } + + @Override + public String call() throws Exception { + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("id", id); + uriBuilder.addParameter("prettyPrint", "false"); + if (StringUtils.isNotBlank(fields)) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + return response; + } + } + + /** + * This class is a Callable implementation to get VuFind entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable { + + private Query query; + + private String fields; + + public SearchByQueryCallable(String queryString, Integer maxResult, Integer start, String fields) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + if (StringUtils.isNotBlank(fields)) { + this.fields = fields; + } else { + this.fields = null; + } + } + + public SearchByQueryCallable(Query query, String fields) { + this.query = query; + if (StringUtils.isNotBlank(fields)) { + this.fields = fields; + } else { + this.fields = null; + } + } + + @Override + public String call() throws Exception { + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + int page = count != 0 ? start / count : 0; + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("type", "AllField"); + //page looks 1 based (start = 0, count = 20 -> page = 0) + uriBuilder.addParameter("page", String.valueOf(page + 1)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", String.valueOf(true)); + uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); + if (StringUtils.isNotBlank(fields)) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + Map> params = new HashMap>(); + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } + + } + + /** + * This class is a Callable implementation to search VuFind entries using author and title. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + public class FindMatchingRecordsCallable implements Callable { + + private Query query; + + private String fields; + + public FindMatchingRecordsCallable(Query query) { + this.query = query; + } + + @Override + public String call() throws Exception { + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + int page = count != 0 ? start / count : 0; + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("type", "AllField"); + //pagination is 1 based (first page: start = 0, count = 20 -> page = 0 -> +1 = 1) + uriBuilder.addParameter("page", String.valueOf(page ++)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", "true"); + if (fields != null && !fields.isEmpty()) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + String filter = StringUtils.EMPTY; + if (StringUtils.isNotBlank(author)) { + filter = "author:" + author; + } + if (StringUtils.isNotBlank(title)) { + if (StringUtils.isNotBlank(filter)) { + filter = filter + " AND title:" + title; + } else { + filter = "title:" + title; + } + } + uriBuilder.addParameter("lookfor", filter); + Map> params = new HashMap>(); + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } + + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + private List extractMetadataFromRecordList(String records) { + List recordsResult = new ArrayList<>(); + JsonNode jsonNode = 
convertStringJsonToJsonNode(records); + JsonNode node = jsonNode.get("records"); + if (Objects.nonNull(node) && node.isArray()) { + Iterator nodes = node.iterator(); + while (nodes.hasNext()) { + recordsResult.add(transformSourceRecords(nodes.next().toString())); + } + } + return recordsResult; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java new file mode 100644 index 000000000000..b14927a14ccc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.vufind.metadatamapping; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the VuFind metadatum fields on the DSpace metadatum fields + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@SuppressWarnings("rawtypes") +public class VuFindFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. 
The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "vufindMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSFieldMapping.java new file mode 100644 index 000000000000..be4acfbcea8c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.wos.service; +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Web of Science metadatum fields on the DSpace metadatum fields + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class WOSFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve + * metadata and metadata that will be set to the item. 
+ */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "wosMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java new file mode 100644 index 000000000000..f550b659952b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/wos/service/WOSImportMetadataSourceServiceImpl.java @@ -0,0 +1,333 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.wos.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import 
org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Web of Science. + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class WOSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private static final String AI_PATTERN = "^AI=(.*)"; + private static final Pattern ISI_PATTERN = Pattern.compile("^\\d{15}$"); + + private int timeout = 1000; + + private String url; + private String urlSearch; + private String apiKey; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "wos"; + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByQueryCallable(query)); + return records == null || records.isEmpty() ? 
null : records.get(0); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return records == null || records.isEmpty() ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for WOS"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for WOS"); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for WOS"); + } + + /** + * This class implements a callable to get the numbers of result + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + if (StringUtils.isNotBlank(apiKey)) { + String queryString = URLEncoder.encode(checkQuery(query), StandardCharsets.UTF_8); + String url = urlSearch + queryString + "&count=1&firstRecord=1"; + Map> params = new HashMap>(); + params.put(HEADER_PARAMETERS, getRequestParameters()); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(response)); + Element root = 
document.getRootElement(); + XPathExpression xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]", + Filters.element(), null); + Element tot = xpath.evaluateFirst(root); + return Integer.valueOf(tot.getValue()); + } + return null; + } + } + + /** + * This class is a Callable implementation to get a Web of Science entry using Doi + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class FindByIdCallable implements Callable> { + + private String doi; + + private FindByIdCallable(String doi) { + this.doi = URLEncoder.encode(doi, StandardCharsets.UTF_8); + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + if (StringUtils.isNotBlank(apiKey)) { + String urlString = url + this.doi + "?databaseId=WOS&lang=en&count=10&firstRecord=1"; + Map> params = new HashMap>(); + params.put(HEADER_PARAMETERS, getRequestParameters()); + String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params); + + List elements = splitToRecords(response); + for (Element record : elements) { + results.add(transformSourceRecords(record)); + } + } + return results; + } + } + + /** + * Find records matching a string query. + * + * @param query A query string to base the search on. + * @param start Offset to start at + * @param count Number of records to retrieve. + * @return A set of records. Fully transformed. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String queryString = checkQuery(query.getParameterAsClass("query", String.class)); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + if (StringUtils.isNotBlank(apiKey)) { + Map> params = new HashMap>(); + params.put(HEADER_PARAMETERS, getRequestParameters()); + String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8) + + "&count=" + count + "&firstRecord=" + (start + 1); + String response = liveImportClient.executeHttpGetRequest(timeout, url, params); + + List omElements = splitToRecords(response); + for (Element el : omElements) { + results.add(transformSourceRecords(el)); + } + } + return results; + } + + } + + private Map getRequestParameters() { + Map params = new HashMap(); + params.put("Accept", "application/xml"); + params.put("X-ApiKey", this.apiKey); + return params; + } + + /** + * This method check if the query contain + * "AI=(...)" Author Identifier or a DOI "DO=(query)" + * or Accession Number "UT=(query)". 
+ * Otherwise the value is placed in TS=(query) tag + * that searches for topic terms in the following fields within a document: + * Title, Abstract, Author keywords, Keywords Plus + * + * @param query + */ + private String checkQuery(String query) { + Pattern risPattern = Pattern.compile(AI_PATTERN); + Matcher risMatcher = risPattern.matcher(query.trim()); + if (risMatcher.matches()) { + return query; + } + if (DoiCheck.isDoi(query)) { + // FIXME: workaround to be removed once fixed by the community the double post of query param + if (query.startsWith(",")) { + query = query.substring(1); + } + return "DO=(" + query + ")"; + } else if (isIsi(query)) { + return "UT=(" + query + ")"; + } + StringBuilder queryBuilder = new StringBuilder("TS=("); + queryBuilder.append(query).append(")"); + return queryBuilder.toString(); + } + + private boolean isIsi(String query) { + if (query.startsWith("WOS:")) { + return true; + } + Matcher matcher = ISI_PATTERN.matcher(query.trim()); + return matcher.matches(); + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + // disallow DTD parsing to ensure no XXE attacks can occur + saxBuilder.setFeature("http://apache.org/xml/features/disallow-doctype-decl",true); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]", + Filters.element(), null).evaluate(root).get(0).getValue().trim(); + Document intDocument = saxBuilder.build(new StringReader(cData)); + XPathExpression xPath = XPathFactory.instance().compile("*", Filters.element(), null); + List records = xPath.evaluate(intDocument.getRootElement()); + if (CollectionUtils.isNotEmpty(records)) { + return records; + } + } catch (JDOMException | IOException e) { + log.error(e.getMessage()); + return new ArrayList(); + } + return new ArrayList(); + } + + public String getUrl() { + return url; + } + + 
public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + + public String getApiKey() { + return apiKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java index 0c061d2d6428..64450b796c17 100644 --- a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorService.java @@ -10,7 +10,7 @@ import java.io.IOException; import java.util.Map; -import org.jdom.Document; +import org.jdom2.Document; /** * Service interface class for the Creative commons license connector service. diff --git a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java index 792c25d62929..cdecadba5242 100644 --- a/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/license/CCLicenseConnectorServiceImpl.java @@ -32,13 +32,14 @@ import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.Logger; import org.dspace.services.ConfigurationService; -import org.jaxen.JaxenException; -import org.jaxen.jdom.JDOMXPath; -import org.jdom.Attribute; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.JDOMException; -import org.jdom.input.SAXBuilder; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; import org.springframework.beans.factory.InitializingBean; 
import org.springframework.beans.factory.annotation.Autowired; import org.xml.sax.InputSource; @@ -96,7 +97,7 @@ public Map retrieveLicenses(String language) { List licenses; try (CloseableHttpResponse response = client.execute(httpGet)) { licenses = retrieveLicenses(response); - } catch (JDOMException | JaxenException | IOException e) { + } catch (JDOMException | IOException e) { log.error("Error while retrieving the license details using url: " + uri, e); licenses = Collections.emptyList(); } @@ -105,12 +106,12 @@ public Map retrieveLicenses(String language) { for (String license : licenses) { - String licenseUri = ccLicenseUrl + "/license/" + license; + String licenseUri = ccLicenseUrl + "/license/" + license + "?locale=" + language; HttpGet licenseHttpGet = new HttpGet(licenseUri); try (CloseableHttpResponse response = client.execute(licenseHttpGet)) { CCLicense ccLicense = retrieveLicenseObject(license, response); ccLicenses.put(ccLicense.getLicenseId(), ccLicense); - } catch (JaxenException | JDOMException | IOException e) { + } catch (JDOMException | IOException e) { log.error("Error while retrieving the license details using url: " + licenseUri, e); } } @@ -125,25 +126,23 @@ public Map retrieveLicenses(String language) { * @param response The response from the API * @return a list of license identifiers for which details need to be retrieved * @throws IOException - * @throws JaxenException * @throws JDOMException */ private List retrieveLicenses(CloseableHttpResponse response) - throws IOException, JaxenException, JDOMException { + throws IOException, JDOMException { List domains = new LinkedList<>(); String[] excludedLicenses = configurationService.getArrayProperty("cc.license.classfilter"); - String responseString = EntityUtils.toString(response.getEntity()); - JDOMXPath licenseClassXpath = new JDOMXPath("//licenses/license"); - + XPathExpression licenseClassXpath = + XPathFactory.instance().compile("//licenses/license", Filters.element()); try 
(StringReader stringReader = new StringReader(responseString)) { InputSource is = new InputSource(stringReader); - org.jdom.Document classDoc = this.parser.build(is); + org.jdom2.Document classDoc = this.parser.build(is); - List elements = licenseClassXpath.selectNodes(classDoc); + List elements = licenseClassXpath.evaluate(classDoc); for (Element element : elements) { String licenseId = getSingleNodeValue(element, "@id"); if (StringUtils.isNotBlank(licenseId) && !ArrayUtils.contains(excludedLicenses, licenseId)) { @@ -163,30 +162,29 @@ private List retrieveLicenses(CloseableHttpResponse response) * @param response for a specific CC License response * @return the corresponding CC License Object * @throws IOException - * @throws JaxenException * @throws JDOMException */ private CCLicense retrieveLicenseObject(final String licenseId, CloseableHttpResponse response) - throws IOException, JaxenException, JDOMException { + throws IOException, JDOMException { String responseString = EntityUtils.toString(response.getEntity()); - - JDOMXPath licenseClassXpath = new JDOMXPath("//licenseclass"); - JDOMXPath licenseFieldXpath = new JDOMXPath("field"); - + XPathExpression licenseClassXpath = + XPathFactory.instance().compile("//licenseclass", Filters.fpassthrough()); + XPathExpression licenseFieldXpath = + XPathFactory.instance().compile("field", Filters.element()); try (StringReader stringReader = new StringReader(responseString)) { InputSource is = new InputSource(stringReader); - org.jdom.Document classDoc = this.parser.build(is); + org.jdom2.Document classDoc = this.parser.build(is); - Object element = licenseClassXpath.selectSingleNode(classDoc); + Object element = licenseClassXpath.evaluateFirst(classDoc); String licenseLabel = getSingleNodeValue(element, "label"); List ccLicenseFields = new LinkedList<>(); - List licenseFields = licenseFieldXpath.selectNodes(element); + List licenseFields = licenseFieldXpath.evaluate(element); for (Element licenseField : licenseFields) 
{ CCLicenseField ccLicenseField = parseLicenseField(licenseField); ccLicenseFields.add(ccLicenseField); @@ -196,13 +194,14 @@ private CCLicense retrieveLicenseObject(final String licenseId, CloseableHttpRes } } - private CCLicenseField parseLicenseField(final Element licenseField) throws JaxenException { + private CCLicenseField parseLicenseField(final Element licenseField) { String id = getSingleNodeValue(licenseField, "@id"); String label = getSingleNodeValue(licenseField, "label"); String description = getSingleNodeValue(licenseField, "description"); - JDOMXPath enumXpath = new JDOMXPath("enum"); - List enums = enumXpath.selectNodes(licenseField); + XPathExpression enumXpath = + XPathFactory.instance().compile("enum", Filters.element()); + List enums = enumXpath.evaluate(licenseField); List ccLicenseFieldEnumList = new LinkedList<>(); @@ -215,7 +214,7 @@ private CCLicenseField parseLicenseField(final Element licenseField) throws Jaxe } - private CCLicenseFieldEnum parseEnum(final Element enumElement) throws JaxenException { + private CCLicenseFieldEnum parseEnum(final Element enumElement) { String id = getSingleNodeValue(enumElement, "@id"); String label = getSingleNodeValue(enumElement, "label"); String description = getSingleNodeValue(enumElement, "description"); @@ -236,9 +235,10 @@ private String getNodeValue(final Object el) { } } - private String getSingleNodeValue(final Object t, String query) throws JaxenException { - JDOMXPath xpath = new JDOMXPath(query); - Object singleNode = xpath.selectSingleNode(t); + private String getSingleNodeValue(final Object t, String query) { + XPathExpression xpath = + XPathFactory.instance().compile(query, Filters.fpassthrough()); + Object singleNode = xpath.evaluateFirst(t); return getNodeValue(singleNode); } @@ -273,7 +273,7 @@ public String retrieveRightsByQuestion(String licenseId, try (CloseableHttpResponse response = client.execute(httpPost)) { return retrieveLicenseUri(response); - } catch (JDOMException | 
JaxenException | IOException e) { + } catch (JDOMException | IOException e) { log.error("Error while retrieving the license uri for license : " + licenseId + " with answers " + answerMap.toString(), e); } @@ -286,21 +286,20 @@ public String retrieveRightsByQuestion(String licenseId, * @param response for a specific CC License URI response * @return the corresponding CC License URI as a string * @throws IOException - * @throws JaxenException * @throws JDOMException */ private String retrieveLicenseUri(final CloseableHttpResponse response) - throws IOException, JaxenException, JDOMException { + throws IOException, JDOMException { String responseString = EntityUtils.toString(response.getEntity()); - JDOMXPath licenseClassXpath = new JDOMXPath("//result/license-uri"); - + XPathExpression licenseClassXpath = + XPathFactory.instance().compile("//result/license-uri", Filters.fpassthrough()); try (StringReader stringReader = new StringReader(responseString)) { InputSource is = new InputSource(stringReader); - org.jdom.Document classDoc = this.parser.build(is); + org.jdom2.Document classDoc = this.parser.build(is); - Object node = licenseClassXpath.selectSingleNode(classDoc); + Object node = licenseClassXpath.evaluateFirst(classDoc); String nodeValue = getNodeValue(node); if (StringUtils.isNotBlank(nodeValue)) { @@ -364,12 +363,7 @@ public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException { * @return the license name */ public String retrieveLicenseName(final Document doc) { - try { - return getSingleNodeValue(doc, "//result/license-name"); - } catch (JaxenException e) { - log.error("Error while retrieving the license name from the license document", e); - } - return null; + return getSingleNodeValue(doc, "//result/license-name"); } } diff --git a/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java b/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java index 40e727d9df3d..c9c8127d1844 100644 --- 
a/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/license/CreativeCommonsServiceImpl.java @@ -40,8 +40,8 @@ import org.dspace.license.service.CreativeCommonsService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Document; -import org.jdom.transform.JDOMSource; +import org.jdom2.Document; +import org.jdom2.transform.JDOMSource; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -67,7 +67,7 @@ public class CreativeCommonsServiceImpl implements CreativeCommonsService, Initi protected static final String BSN_LICENSE_URL = "license_url"; /** - * @deprecated to make uniform JSPUI and XMLUI approach the bitstream with the license in the textual format it + * @deprecated the bitstream with the license in the textual format it * is no longer stored (see https://jira.duraspace.org/browse/DS-2604) */ @Deprecated @@ -219,7 +219,7 @@ public String getLicenseURL(Context context, Item item) throws SQLException, IOE return getLicenseURI(item); } - // JSPUI backward compatibility see https://jira.duraspace.org/browse/DS-2604 + // backward compatibility see https://jira.duraspace.org/browse/DS-2604 return getStringFromBitstream(context, item, BSN_LICENSE_URL); } @@ -430,9 +430,10 @@ private void removeLicenseField(Context context, Item item, String field) throws } - private void addLicenseField(Context context, Item item, String field, String value) throws SQLException { + private void addLicenseField(Context context, Item item, String field, String language, String value) + throws SQLException { String[] params = splitField(field); - itemService.addMetadata(context, item, params[0], params[1], params[2], params[3], value); + itemService.addMetadata(context, item, params[0], params[1], params[2], language, value); } @@ -605,7 +606,10 @@ public Map 
retrieveFullAnswerMap(String licenseId, String langua } } - updateJurisdiction(fullParamMap); + // Replace the jurisdiction unless default value is set to none + if (!"none".equals(jurisdiction)) { + updateJurisdiction(fullParamMap); + } return fullParamMap; } @@ -688,12 +692,12 @@ public void addLicense(Context context, Item item, String licenseUri, String lic String uriField = getCCField("uri"); String nameField = getCCField("name"); - addLicenseField(context, item, uriField, licenseUri); + addLicenseField(context, item, uriField, null, licenseUri); if (configurationService.getBooleanProperty("cc.submit.addbitstream")) { setLicenseRDF(context, item, fetchLicenseRDF(doc)); } if (configurationService.getBooleanProperty("cc.submit.setname")) { - addLicenseField(context, item, nameField, licenseName); + addLicenseField(context, item, nameField, "en", licenseName); } } diff --git a/dspace-api/src/main/java/org/dspace/license/LicenseCleanup.java b/dspace-api/src/main/java/org/dspace/license/LicenseCleanup.java index 55eeb8d314e6..0eac224bcda2 100644 --- a/dspace-api/src/main/java/org/dspace/license/LicenseCleanup.java +++ b/dspace-api/src/main/java/org/dspace/license/LicenseCleanup.java @@ -15,6 +15,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; +import java.nio.charset.StandardCharsets; import java.sql.SQLException; import java.util.Iterator; import java.util.List; @@ -38,7 +39,8 @@ import org.dspace.core.Context; /** - * Cleanup class for CC Licenses, corrects XML formating errors by replacing the license_rdf bitstream. + * Cleanup class for CC Licenses, corrects XML formatting errors by replacing + * the license_rdf bitstream. 
* * @author mdiggory */ @@ -130,7 +132,7 @@ protected static void handleItem(Context context, Item item) throws SQLException AuthorizeException, IOException { List bundles = itemService.getBundles(item, "CC-LICENSE"); - if (bundles == null || bundles.size() == 0) { + if (bundles == null || bundles.isEmpty()) { return; } @@ -138,7 +140,7 @@ protected static void handleItem(Context context, Item item) throws SQLException Bitstream bitstream = bundleService.getBitstreamByName(bundle, "license_rdf"); - String license_rdf = new String(copy(context, bitstream)); + String license_rdf = new String(copy(context, bitstream), StandardCharsets.UTF_8); /* quickly fix xml by ripping out offensive parts */ license_rdf = license_rdf.replaceFirst(" { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_history_id_seq") + @SequenceGenerator(name = "orcid_history_id_seq", sequenceName = "orcid_history_id_seq", allocationSize = 1) + private Integer id; + + /** + * The profile item. + */ + @ManyToOne + @JoinColumn(name = "owner_id") + protected Item profileItem; + + /** + * The synchronized item. + */ + @ManyToOne + @JoinColumn(name = "entity_id") + private Item entity; + + /** + * The identifier of the synchronized resource on ORCID side. For more details + * see https://info.orcid.org/faq/what-is-a-put-code/ + */ + @Column(name = "put_code") + private String putCode; + + /** + * The record type. Could be publication, funding or a profile's section. + */ + @Column(name = "record_type") + private String recordType; + + /** + * A description of the synchronized resource. + */ + @Lob + @Type(type = "org.hibernate.type.TextType") + @Column(name = "description") + private String description; + + /** + * The signature of the synchronized metadata. This is used when the entity is + * the owner itself. + */ + @Lob + @Type(type = "org.hibernate.type.TextType") + @Column(name = "metadata") + private String metadata; + + /** + * The operation performed on ORCID. 
+ */ + @Enumerated(EnumType.STRING) + @Column(name = "operation") + private OrcidOperation operation; + + /** + * The response message incoming from ORCID. + */ + @Lob + @Type(type = "org.hibernate.type.TextType") + @Column(name = "response_message") + private String responseMessage; + + /** + * The timestamp of the synchronization attempt. + */ + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "timestamp_last_attempt") + private Date timestamp = new Date(); + + /** + * The HTTP status incoming from ORCID. + */ + @Column(name = "status") + private Integer status; + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public void setId(Integer id) { + this.id = id; + } + + @Override + public Integer getID() { + return id; + } + + public Item getProfileItem() { + return profileItem; + } + + public void setProfileItem(Item profileItem) { + this.profileItem = profileItem; + } + + public Item getEntity() { + return entity; + } + + public void setEntity(Item entity) { + this.entity = entity; + } + + public String getPutCode() { + return putCode; + } + + public void setPutCode(String putCode) { + this.putCode = putCode; + } + + public String getResponseMessage() { + return responseMessage; + } + + public void setResponseMessage(String responseMessage) { + this.responseMessage = responseMessage; + } + + public String getRecordType() { + return recordType; + } + + public void setRecordType(String recordType) { + this.recordType = recordType; + } + + public String getMetadata() { + return metadata; + } + + public void setMetadata(String metadata) { + this.metadata = metadata; + } + + public OrcidOperation getOperation() { + return operation; + } + + public void setOperation(OrcidOperation operation) { + this.operation = operation; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public 
Date getTimestamp() { + return timestamp; + } + + public void setTimestamp(Date timestamp) { + this.timestamp = timestamp; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidOperation.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidOperation.java new file mode 100644 index 000000000000..381e35e84d7c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidOperation.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +/** + * Enum that models an ORCID synchronization operation. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidOperation { + INSERT, + UPDATE, + DELETE; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java new file mode 100644 index 000000000000..65b66cd20c3e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -0,0 +1,221 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import static org.apache.commons.lang3.StringUtils.isEmpty; +import static org.apache.commons.lang3.StringUtils.isNotEmpty; + +import java.util.Objects; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.Lob; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import 
javax.persistence.Table; + +import org.dspace.content.Item; +import org.dspace.core.ReloadableEntity; +import org.hibernate.annotations.Type; + +/** + * Entity that models a record on the ORCID synchronization queue. Each record in + * this table is associated with a profile item and the entity to be + * synchronized (which can be the profile itself, a publication or a + * project/funding). If the entity is the profile itself then the metadata field + * contains the signature of the information to be synchronized. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Entity +@Table(name = "orcid_queue") +public class OrcidQueue implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_queue_id_seq") + @SequenceGenerator(name = "orcid_queue_id_seq", sequenceName = "orcid_queue_id_seq", allocationSize = 1) + private Integer id; + + /** + * The profile item. + */ + @ManyToOne + @JoinColumn(name = "owner_id") + protected Item profileItem; + + /** + * The entity to be synchronized. + */ + @ManyToOne + @JoinColumn(name = "entity_id") + private Item entity; + + /** + * A description of the resource to be synchronized. + */ + @Lob + @Type(type = "org.hibernate.type.TextType") + @Column(name = "description") + private String description; + + /** + * The identifier of the resource to be synchronized on ORCID side (in case of + * update or deletion). For more details see + * https://info.orcid.org/faq/what-is-a-put-code/ + */ + @Column(name = "put_code") + private String putCode; + + /** + * The record type. Could be publication, funding or a profile's section. + */ + @Column(name = "record_type") + private String recordType; + + /** + * The signature of the metadata to be synchronized. This is used when the + * entity is the owner itself. 
+ */ + @Lob + @Column(name = "metadata") + @Type(type = "org.hibernate.type.TextType") + private String metadata; + + /** + * The operation to be performed on ORCID. + */ + @Enumerated(EnumType.STRING) + @Column(name = "operation") + private OrcidOperation operation; + + /** + * Synchronization attempts already made for a particular record. + */ + @Column(name = "attempts") + private Integer attempts = 0; + + public boolean isInsertAction() { + return entity != null && isEmpty(putCode); + } + + public boolean isUpdateAction() { + return entity != null && isNotEmpty(putCode); + } + + public boolean isDeleteAction() { + return entity == null && isNotEmpty(putCode); + } + + public void setID(Integer id) { + this.id = id; + } + + @Override + public Integer getID() { + return this.id; + } + + public Item getProfileItem() { + return profileItem; + } + + public void setProfileItem(Item profileItem) { + this.profileItem = profileItem; + } + + public Item getEntity() { + return entity; + } + + public void setEntity(Item entity) { + this.entity = entity; + } + + public String getPutCode() { + return putCode; + } + + public void setPutCode(String putCode) { + this.putCode = putCode; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + OrcidQueue other = (OrcidQueue) obj; + return Objects.equals(id, other.id); + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getRecordType() { + return recordType; + } + + public void setRecordType(String recordType) { + this.recordType = recordType; + } + + public String getMetadata() { + return metadata; + } + + public void setMetadata(String metadata) { + this.metadata = metadata; + } + + public 
OrcidOperation getOperation() { + return operation; + } + + public void setOperation(OrcidOperation operation) { + this.operation = operation; + } + + public Integer getAttempts() { + return attempts; + } + + public void setAttempts(Integer attempts) { + this.attempts = attempts; + } + + @Override + public String toString() { + return "OrcidQueue [id=" + id + ", profileItem=" + profileItem + ", entity=" + entity + ", description=" + + description + + ", putCode=" + putCode + ", recordType=" + recordType + ", metadata=" + metadata + ", operation=" + + operation + "]"; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidToken.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidToken.java new file mode 100644 index 000000000000..def289daf41e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidToken.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.OneToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +import org.dspace.content.Item; +import org.dspace.core.ReloadableEntity; +import org.dspace.eperson.EPerson; + +/** + * Entity that stores ORCID access-token related to a given eperson or a given + * profile item. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Entity +@Table(name = "orcid_token") +public class OrcidToken implements ReloadableEntity { + + @Id + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_token_id_seq") + @SequenceGenerator(name = "orcid_token_id_seq", sequenceName = "orcid_token_id_seq", allocationSize = 1) + private Integer id; + + @OneToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "eperson_id") + protected EPerson ePerson; + + @OneToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "profile_item_id") + private Item profileItem; + + @Column(name = "access_token") + private String accessToken; + + @Override + public Integer getID() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public EPerson getEPerson() { + return ePerson; + } + + public void setEPerson(EPerson eperson) { + this.ePerson = eperson; + } + + public Item getProfileItem() { + return profileItem; + } + + public void setProfileItem(Item profileItem) { + this.profileItem = profileItem; + } + + public String getAccessToken() { + return accessToken; + } + + public void setAccessToken(String accessToken) { + this.accessToken = accessToken; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClient.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClient.java new file mode 100644 index 000000000000..99d1920aa53a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClient.java @@ -0,0 +1,164 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import java.util.List; +import java.util.Optional; + +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import 
org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.summary.Works; + +/** + * Interface for classes that allow contacting ORCID. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidClient { + + /** + * Retrieves a /read-public access token using a client-credentials OAuth flow, + * or 2-step OAuth. + * + * @return the ORCID token + * @throws OrcidClientException if some error occurs during the exchange + */ + OrcidTokenResponseDTO getReadPublicAccessToken(); + + /** + * Exchange the authorization code for an ORCID iD and 3-legged access token. + * The authorization code expires upon use. + * + * @param code the authorization code + * @return the ORCID token + * @throws OrcidClientException if some error occurs during the exchange + */ + OrcidTokenResponseDTO getAccessToken(String code); + + /** + * Retrieves a summary of the ORCID person related to the given orcid. + * + * @param accessToken the access token + * @param orcid the orcid id of the record to retrieve + * @return the Person + * @throws OrcidClientException if some error occurs during the search + */ + Person getPerson(String accessToken, String orcid); + + /** + * Retrieves all the works related to the given orcid. + * + * @param accessToken the access token + * @param orcid the orcid id related to the works + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + Works getWorks(String accessToken, String orcid); + + /** + * Retrieves all the works related to the given orcid. 
+ * + * @param orcid the orcid id related to the works + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + Works getWorks(String orcid); + + /** + * Retrieves all the works with the given putCodes related to the given orcid + * + * @param accessToken the access token + * @param orcid the orcid id + * @param putCodes the putCodes of the works to retrieve + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + WorkBulk getWorkBulk(String accessToken, String orcid, List putCodes); + + /** + * Retrieves all the works with the given putCodes related to the given orcid + * + * @param orcid the orcid id + * @param putCodes the putCodes of the works to retrieve + * @return the Works + * @throws OrcidClientException if some error occurs during the search + */ + WorkBulk getWorkBulk(String orcid, List putCodes); + + /** + * Retrieves an object from ORCID with the given putCode related to the given + * orcid. + * + * @param accessToken the access token + * @param orcid the orcid id + * @param putCode the object's put code + * @param clazz the object's class + * @return the Object, if any + * @throws OrcidClientException if some error occurs during the search + * @throws IllegalArgumentException if the given object class is not a valid + * ORCID object + */ + Optional getObject(String accessToken, String orcid, String putCode, Class clazz); + + /** + * Retrieves an object from ORCID with the given putCode related to the given + * orcid using the public API. + * + * @param orcid the orcid id + * @param putCode the object's put code + * @param clazz the object's class + * @return the Object, if any + * @throws OrcidClientException if some error occurs during the search + * @throws IllegalArgumentException if the given object class is not a valid + * ORCID object + */ + Optional getObject(String orcid, String putCode, Class clazz); + + /** + * Push the given object to ORCID. 
+ * + * @param accessToken the access token + * @param orcid the orcid id + * @param object the orcid object to push + * @return the orcid response if no error occurs + * @throws OrcidClientException if some error occurs during the push + * @throws IllegalArgumentException if the given object is not a valid ORCID + * object + */ + OrcidResponse push(String accessToken, String orcid, Object object); + + /** + * Update the object with the given putCode. + * + * @param accessToken the access token + * @param orcid the orcid id + * @param object the orcid object to push + * @param putCode the put code of the resource to update + * @return the orcid response if no error occurs + * @throws OrcidClientException if some error occurs during the push + * @throws IllegalArgumentException if the given object is not a valid ORCID + * object + */ + OrcidResponse update(String accessToken, String orcid, Object object, String putCode); + + /** + * Delete the ORCID object with the given putCode on the given path. 
+ * + * @param accessToken the access token + * @param orcid the orcid id + * @param putCode the put code of the resource to delete + * @param path the path of the resource to delete + * @return the orcid response if no error occurs + * @throws OrcidClientException if some error occurs during the search + */ + OrcidResponse deleteByPutCode(String accessToken, String orcid, String putCode, String path); + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClientImpl.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClientImpl.java new file mode 100644 index 000000000000..3e7ca7b21029 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidClientImpl.java @@ -0,0 +1,394 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import static org.apache.http.client.methods.RequestBuilder.delete; +import static org.apache.http.client.methods.RequestBuilder.get; +import static org.apache.http.client.methods.RequestBuilder.post; +import static org.apache.http.client.methods.RequestBuilder.put; + +import java.io.IOException; +import java.io.StringWriter; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.JAXBException; +import javax.xml.bind.Marshaller; +import javax.xml.bind.Unmarshaller; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamReader; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.io.IOUtils; +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import 
org.apache.http.NameValuePair; +import org.apache.http.client.HttpClient; +import org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.message.BasicNameValuePair; +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.util.ThrowingSupplier; +import org.orcid.jaxb.model.v3.release.record.Address; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.Keyword; +import org.orcid.jaxb.model.v3.release.record.OtherName; +import org.orcid.jaxb.model.v3.release.record.Person; +import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier; +import org.orcid.jaxb.model.v3.release.record.ResearcherUrl; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.summary.Works; + +/** + * Implementation of {@link OrcidClient}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidClientImpl implements OrcidClient { + + /** + * Mapping between ORCID JAXB models and the sub-paths on ORCID API. 
+ */ + private static final Map, String> PATHS_MAP = initializePathsMap(); + + private final OrcidConfiguration orcidConfiguration; + + private final ObjectMapper objectMapper; + + public OrcidClientImpl(OrcidConfiguration orcidConfiguration) { + this.orcidConfiguration = orcidConfiguration; + this.objectMapper = new ObjectMapper(); + } + + private static Map, String> initializePathsMap() { + Map, String> map = new HashMap, String>(); + map.put(Work.class, OrcidEntityType.PUBLICATION.getPath()); + map.put(Funding.class, OrcidEntityType.FUNDING.getPath()); + map.put(Address.class, OrcidProfileSectionType.COUNTRY.getPath()); + map.put(OtherName.class, OrcidProfileSectionType.OTHER_NAMES.getPath()); + map.put(ResearcherUrl.class, OrcidProfileSectionType.RESEARCHER_URLS.getPath()); + map.put(PersonExternalIdentifier.class, OrcidProfileSectionType.EXTERNAL_IDS.getPath()); + map.put(Keyword.class, OrcidProfileSectionType.KEYWORDS.getPath()); + return map; + } + + @Override + public OrcidTokenResponseDTO getAccessToken(String code) { + + List params = new ArrayList(); + params.add(new BasicNameValuePair("code", code)); + params.add(new BasicNameValuePair("grant_type", "authorization_code")); + params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId())); + params.add(new BasicNameValuePair("client_secret", orcidConfiguration.getClientSecret())); + + HttpUriRequest httpUriRequest = RequestBuilder.post(orcidConfiguration.getTokenEndpointUrl()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Accept", "application/json") + .setEntity(new UrlEncodedFormEntity(params, Charset.defaultCharset())) + .build(); + + return executeAndParseJson(httpUriRequest, OrcidTokenResponseDTO.class); + + } + + @Override + public Person getPerson(String accessToken, String orcid) { + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/person"); + return executeAndUnmarshall(httpUriRequest, false, Person.class); + } + 
+ @Override + public Works getWorks(String accessToken, String orcid) { + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/works"); + Works works = executeAndUnmarshall(httpUriRequest, true, Works.class); + return works != null ? works : new Works(); + } + + @Override + public Works getWorks(String orcid) { + HttpUriRequest httpUriRequest = buildGetUriRequestToPublicEndpoint("/" + orcid + "/works"); + Works works = executeAndUnmarshall(httpUriRequest, true, Works.class); + return works != null ? works : new Works(); + } + + @Override + public WorkBulk getWorkBulk(String accessToken, String orcid, List putCodes) { + String putCode = String.join(",", putCodes); + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/works/" + putCode); + WorkBulk workBulk = executeAndUnmarshall(httpUriRequest, true, WorkBulk.class); + return workBulk != null ? workBulk : new WorkBulk(); + } + + @Override + public WorkBulk getWorkBulk(String orcid, List putCodes) { + String putCode = String.join(",", putCodes); + HttpUriRequest httpUriRequest = buildGetUriRequestToPublicEndpoint("/" + orcid + "/works/" + putCode); + WorkBulk workBulk = executeAndUnmarshall(httpUriRequest, true, WorkBulk.class); + return workBulk != null ? 
workBulk : new WorkBulk(); + } + + @Override + public Optional getObject(String accessToken, String orcid, String putCode, Class clazz) { + String path = getOrcidPathFromOrcidObjectType(clazz); + HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + path + "/" + putCode); + return Optional.ofNullable(executeAndUnmarshall(httpUriRequest, true, clazz)); + } + + @Override + public Optional getObject(String orcid, String putCode, Class clazz) { + String path = getOrcidPathFromOrcidObjectType(clazz); + HttpUriRequest httpUriRequest = buildGetUriRequestToPublicEndpoint("/" + orcid + path + "/" + putCode); + return Optional.ofNullable(executeAndUnmarshall(httpUriRequest, true, clazz)); + } + + @Override + public OrcidResponse push(String accessToken, String orcid, Object object) { + String path = getOrcidPathFromOrcidObjectType(object.getClass()); + return execute(buildPostUriRequest(accessToken, "/" + orcid + path, object), false); + } + + @Override + public OrcidResponse update(String accessToken, String orcid, Object object, String putCode) { + String path = getOrcidPathFromOrcidObjectType(object.getClass()); + return execute(buildPutUriRequest(accessToken, "/" + orcid + path + "/" + putCode, object), false); + } + + @Override + public OrcidResponse deleteByPutCode(String accessToken, String orcid, String putCode, String path) { + return execute(buildDeleteUriRequest(accessToken, "/" + orcid + path + "/" + putCode), true); + } + + @Override + public OrcidTokenResponseDTO getReadPublicAccessToken() { + return getClientCredentialsAccessToken("/read-public"); + } + + private OrcidTokenResponseDTO getClientCredentialsAccessToken(String scope) { + List params = new ArrayList(); + params.add(new BasicNameValuePair("scope", scope)); + params.add(new BasicNameValuePair("grant_type", "client_credentials")); + params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId())); + params.add(new BasicNameValuePair("client_secret", 
orcidConfiguration.getClientSecret())); + + HttpUriRequest httpUriRequest = RequestBuilder.post(orcidConfiguration.getTokenEndpointUrl()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Accept", "application/json") + .setEntity(new UrlEncodedFormEntity(params, Charset.defaultCharset())) + .build(); + + return executeAndParseJson(httpUriRequest, OrcidTokenResponseDTO.class); + } + + private HttpUriRequest buildGetUriRequest(String accessToken, String relativePath) { + return get(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .addHeader("Authorization", "Bearer " + accessToken) + .build(); + } + + private HttpUriRequest buildGetUriRequestToPublicEndpoint(String relativePath) { + return get(orcidConfiguration.getPublicUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/x-www-form-urlencoded") + .build(); + } + + private HttpUriRequest buildPostUriRequest(String accessToken, String relativePath, Object object) { + return post(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/vnd.orcid+xml") + .addHeader("Authorization", "Bearer " + accessToken) + .setEntity(convertToEntity(object)) + .build(); + } + + private HttpUriRequest buildPutUriRequest(String accessToken, String relativePath, Object object) { + return put(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Content-Type", "application/vnd.orcid+xml") + .addHeader("Authorization", "Bearer " + accessToken) + .setEntity(convertToEntity(object)) + .build(); + } + + private HttpUriRequest buildDeleteUriRequest(String accessToken, String relativePath) { + return delete(orcidConfiguration.getApiUrl() + relativePath.trim()) + .addHeader("Authorization", "Bearer " + accessToken) + .build(); + } + + private T executeAndParseJson(HttpUriRequest httpUriRequest, Class clazz) { + + HttpClient client = HttpClientBuilder.create().build(); + + 
return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (isNotSuccessfull(response)) { + throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return objectMapper.readValue(response.getEntity().getContent(), clazz); + + }); + + } + + /** + * Execute the given httpUriRequest, unmarshalling the content with the given + * class. + * @param httpUriRequest the http request to be executed + * @param handleNotFoundAsNull if true this method returns null if the response + * status is 404, if false throws an + * OrcidClientException + * @param clazz the class to be used for the content unmarshall + * @return the response body + * @throws OrcidClientException if the incoming response is not successfull + */ + private T executeAndUnmarshall(HttpUriRequest httpUriRequest, boolean handleNotFoundAsNull, Class clazz) { + + HttpClient client = HttpClientBuilder.create().build(); + + return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (handleNotFoundAsNull && isNotFound(response)) { + return null; + } + + if (isNotSuccessfull(response)) { + throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return unmarshall(response.getEntity(), clazz); + + }); + } + + private OrcidResponse execute(HttpUriRequest httpUriRequest, boolean handleNotFoundAsNull) { + HttpClient client = HttpClientBuilder.create().build(); + + return executeAndReturns(() -> { + + HttpResponse response = client.execute(httpUriRequest); + + if (handleNotFoundAsNull && isNotFound(response)) { + return new OrcidResponse(getStatusCode(response), null, getContent(response)); + } + + if (isNotSuccessfull(response)) { + throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response)); + } + + return new OrcidResponse(getStatusCode(response), getPutCode(response), getContent(response)); + + }); + } + + private T 
executeAndReturns(ThrowingSupplier supplier) { + try { + return supplier.get(); + } catch (OrcidClientException ex) { + throw ex; + } catch (Exception ex) { + throw new OrcidClientException(ex); + } + } + + private String marshall(Object object) throws JAXBException { + JAXBContext jaxbContext = JAXBContext.newInstance(object.getClass()); + Marshaller marshaller = jaxbContext.createMarshaller(); + marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE); + StringWriter stringWriter = new StringWriter(); + marshaller.marshal(object, stringWriter); + return stringWriter.toString(); + } + + @SuppressWarnings("unchecked") + private T unmarshall(HttpEntity entity, Class clazz) throws Exception { + JAXBContext jaxbContext = JAXBContext.newInstance(clazz); + XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory(); + xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false); + XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(entity.getContent()); + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + return (T) unmarshaller.unmarshal(xmlStreamReader); + } + + private HttpEntity convertToEntity(Object object) { + try { + return new StringEntity(marshall(object), StandardCharsets.UTF_8); + } catch (JAXBException ex) { + throw new IllegalArgumentException("The given object cannot be sent to ORCID", ex); + } + } + + private String formatErrorMessage(HttpResponse response) { + try { + return IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset()); + } catch (UnsupportedOperationException | IOException e) { + return "Generic error"; + } + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private boolean isNotFound(HttpResponse response) { + return getStatusCode(response) == HttpStatus.SC_NOT_FOUND; + } + + private int getStatusCode(HttpResponse response) { + return 
response.getStatusLine().getStatusCode(); + } + + private String getOrcidPathFromOrcidObjectType(Class clazz) { + String path = PATHS_MAP.get(clazz); + if (path == null) { + throw new IllegalArgumentException("The given class is not an ORCID object's class: " + clazz); + } + return path; + } + + private String getContent(HttpResponse response) throws UnsupportedOperationException, IOException { + HttpEntity entity = response.getEntity(); + return entity != null ? IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8.name()) : null; + } + + /** + * Returns the put code present in the given http response, if any. For more + * details about the put code see For more details see + * https://info.orcid.org/faq/what-is-a-put-code/ + * @param response the http response coming from ORCID + * @return the put code, if any + */ + private String getPutCode(HttpResponse response) { + Header[] headers = response.getHeaders("Location"); + if (headers.length == 0) { + return null; + } + String value = headers[0].getValue(); + return value.substring(value.lastIndexOf("/") + 1); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidConfiguration.java new file mode 100644 index 000000000000..550b0215c435 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidConfiguration.java @@ -0,0 +1,114 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import org.apache.commons.lang3.StringUtils; + +/** + * A class that contains all the configurations related to ORCID. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class OrcidConfiguration { + + private String apiUrl; + + private String publicUrl; + + private String domainUrl; + + private String redirectUrl; + + private String clientId; + + private String clientSecret; + + private String tokenEndpointUrl; + + private String authorizeEndpointUrl; + + private String scopes; + + public String getApiUrl() { + return apiUrl; + } + + public void setApiUrl(String apiUrl) { + this.apiUrl = apiUrl; + } + + public String getDomainUrl() { + return domainUrl; + } + + public void setDomainUrl(String domainUrl) { + this.domainUrl = domainUrl; + } + + public String getRedirectUrl() { + return redirectUrl; + } + + public void setRedirectUrl(String redirectUrl) { + this.redirectUrl = redirectUrl; + } + + public String getClientId() { + return clientId; + } + + public void setClientId(String clientId) { + this.clientId = clientId; + } + + public String getClientSecret() { + return clientSecret; + } + + public void setClientSecret(String clientSecret) { + this.clientSecret = clientSecret; + } + + public String getTokenEndpointUrl() { + return tokenEndpointUrl; + } + + public void setTokenEndpointUrl(String tokenEndpointUrl) { + this.tokenEndpointUrl = tokenEndpointUrl; + } + + public String getAuthorizeEndpointUrl() { + return authorizeEndpointUrl; + } + + public void setAuthorizeEndpointUrl(String authorizeEndpointUrl) { + this.authorizeEndpointUrl = authorizeEndpointUrl; + } + + public void setScopes(String scopes) { + this.scopes = scopes; + } + + public String[] getScopes() { + return StringUtils.isNotBlank(scopes) ? 
StringUtils.split(scopes, ",") : new String[] {}; + } + + public String getPublicUrl() { + return publicUrl; + } + + public void setPublicUrl(String publicUrl) { + this.publicUrl = publicUrl; + } + + public boolean isApiConfigured() { + return !StringUtils.isAnyBlank(clientId, clientSecret); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/client/OrcidResponse.java b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidResponse.java new file mode 100644 index 000000000000..ef0050cf2026 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/client/OrcidResponse.java @@ -0,0 +1,56 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.client; + +import org.apache.http.HttpStatus; + +/** + * Model a successfully response incoming from ORCID using {@link OrcidClient}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class OrcidResponse { + + private final int status; + + private final String putCode; + + private final String content; + + /** + * Create an ORCID response instance with the specified HTTP status, putCode and + * content. 
+ * + * @param status the HTTP status incoming from ORCID + * @param putCode the identifier of the resource ORCID side + * @param content the response body content + */ + public OrcidResponse(int status, String putCode, String content) { + this.status = status; + this.putCode = putCode; + this.content = content; + } + + public int getStatus() { + return status; + } + + public String getPutCode() { + return putCode; + } + + public String getContent() { + return content; + } + + public boolean isNotFoundStatus() { + return status == HttpStatus.SC_NOT_FOUND; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java new file mode 100644 index 000000000000..d177e61607f1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/consumer/OrcidQueueConsumer.java @@ -0,0 +1,358 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.consumer; + +import static java.util.Arrays.asList; +import static java.util.Comparator.comparing; +import static java.util.Comparator.naturalOrder; +import static java.util.Comparator.nullsFirst; +import static org.apache.commons.collections.CollectionUtils.isNotEmpty; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.Relationship; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import 
org.dspace.content.service.RelationshipService; +import org.dspace.core.Context; +import org.dspace.event.Consumer; +import org.dspace.event.Event; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The consumer to fill the ORCID queue. The addition to the queue is made for + * all archived items that meet one of these conditions: + *
      + *
    • are profiles already linked to orcid that have some modified sections to + * be synchronized (based on the preferences set by the user)
    • + *
    • are publications/fundings related to profile items linked to orcid (based + * on the preferences set by the user)
    • + * + *
    + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueConsumer implements Consumer { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidQueueConsumer.class); + + private OrcidQueueService orcidQueueService; + + private OrcidHistoryService orcidHistoryService; + + private OrcidTokenService orcidTokenService; + + private OrcidSynchronizationService orcidSynchronizationService; + + private ItemService itemService; + + private OrcidProfileSectionFactoryService profileSectionFactoryService; + + private ConfigurationService configurationService; + + private RelationshipService relationshipService; + + private List alreadyConsumedItems = new ArrayList<>(); + + @Override + public void initialize() throws Exception { + + OrcidServiceFactory orcidServiceFactory = OrcidServiceFactory.getInstance(); + + this.orcidQueueService = orcidServiceFactory.getOrcidQueueService(); + this.orcidHistoryService = orcidServiceFactory.getOrcidHistoryService(); + this.orcidSynchronizationService = orcidServiceFactory.getOrcidSynchronizationService(); + this.orcidTokenService = orcidServiceFactory.getOrcidTokenService(); + this.profileSectionFactoryService = orcidServiceFactory.getOrcidProfileSectionFactoryService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + this.relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + + this.itemService = ContentServiceFactory.getInstance().getItemService(); + } + + @Override + public void consume(Context context, Event event) throws Exception { + + if (isOrcidSynchronizationDisabled()) { + return; + } + + DSpaceObject dso = event.getSubject(context); + if (!(dso instanceof Item)) { + return; + } + + Item item = (Item) dso; + if (!item.isArchived()) { + return; + } + + if (alreadyConsumedItems.contains(item.getID())) { + return; + } + + context.turnOffAuthorisationSystem(); + try { + consumeItem(context, item); 
+ } finally { + context.restoreAuthSystemState(); + } + + } + + /** + * Consume the item if it is a profile or an ORCID entity. + */ + private void consumeItem(Context context, Item item) throws SQLException { + + String entityType = itemService.getEntityTypeLabel(item); + if (entityType == null) { + return; + } + + if (OrcidEntityType.isValidEntityType(entityType)) { + consumeEntity(context, item); + } else if (entityType.equals(getProfileType())) { + consumeProfile(context, item); + } + + alreadyConsumedItems.add(item.getID()); + + } + + /** + * Search for all related items to the given entity and create a new ORCID queue + * record if one of this is a profile linked with ORCID and the entity item must + * be synchronized with ORCID. + */ + private void consumeEntity(Context context, Item entity) throws SQLException { + + List relatedItems = findAllRelatedItems(context, entity); + + for (Item relatedItem : relatedItems) { + + if (isNotProfileItem(relatedItem) || isNotLinkedToOrcid(context, relatedItem)) { + continue; + } + + if (shouldNotBeSynchronized(relatedItem, entity) || isAlreadyQueued(context, relatedItem, entity)) { + continue; + } + + orcidQueueService.create(context, relatedItem, entity); + + } + + } + + private List findAllRelatedItems(Context context, Item entity) throws SQLException { + return relationshipService.findByItem(context, entity).stream() + .map(relationship -> getRelatedItem(entity, relationship)) + .collect(Collectors.toList()); + } + + private Item getRelatedItem(Item item, Relationship relationship) { + return item.equals(relationship.getLeftItem()) ? relationship.getRightItem() : relationship.getLeftItem(); + } + + /** + * If the given profile item is linked with ORCID recalculate all the ORCID + * queue records of the configured profile sections that can be synchronized. 
+ */ + private void consumeProfile(Context context, Item item) throws SQLException { + + if (isNotLinkedToOrcid(context, item)) { + return; + } + + for (OrcidProfileSectionFactory factory : getAllProfileSectionFactories(item)) { + + String sectionType = factory.getProfileSectionType().name(); + + orcidQueueService.deleteByEntityAndRecordType(context, item, sectionType); + + if (isProfileSectionSynchronizationDisabled(context, item, factory)) { + continue; + } + + List signatures = factory.getMetadataSignatures(context, item); + List historyRecords = findSuccessfullyOrcidHistoryRecords(context, item, sectionType); + + createInsertionRecordForNewSignatures(context, item, historyRecords, factory, signatures); + createDeletionRecordForNoMorePresentSignatures(context, item, historyRecords, factory, signatures); + + } + + } + + private boolean isProfileSectionSynchronizationDisabled(Context context, + Item item, OrcidProfileSectionFactory factory) { + List preferences = this.orcidSynchronizationService.getProfilePreferences(item); + return !preferences.contains(factory.getSynchronizationPreference()); + } + + /** + * Add new INSERTION record in the ORCID queue based on the metadata signatures + * calculated from the current item state. + */ + private void createInsertionRecordForNewSignatures(Context context, Item item, List historyRecords, + OrcidProfileSectionFactory factory, List signatures) throws SQLException { + + String sectionType = factory.getProfileSectionType().name(); + + for (String signature : signatures) { + + if (isNotAlreadySynchronized(historyRecords, signature)) { + String description = factory.getDescription(context, item, signature); + orcidQueueService.createProfileInsertionRecord(context, item, description, sectionType, signature); + } + + } + + } + + /** + * Add new DELETION records in the ORCID queue for metadata signature presents + * in the ORCID history no more present in the metadata signatures calculated + * from the current item state. 
+ */ + private void createDeletionRecordForNoMorePresentSignatures(Context context, Item profile, + List historyRecords, OrcidProfileSectionFactory factory, List signatures) + throws SQLException { + + String sectionType = factory.getProfileSectionType().name(); + + for (OrcidHistory historyRecord : historyRecords) { + String storedSignature = historyRecord.getMetadata(); + String putCode = historyRecord.getPutCode(); + String description = historyRecord.getDescription(); + + if (signatures.contains(storedSignature) || isAlreadyDeleted(historyRecords, historyRecord)) { + continue; + } + + if (StringUtils.isBlank(putCode)) { + LOGGER.warn("The orcid history record with id {} should have a not blank put code", + historyRecord.getID()); + continue; + } + + orcidQueueService.createProfileDeletionRecord(context, profile, description, + sectionType, storedSignature, putCode); + } + + } + + private List findSuccessfullyOrcidHistoryRecords(Context context, Item item, + String sectionType) throws SQLException { + return orcidHistoryService.findSuccessfullyRecordsByEntityAndType(context, item, sectionType); + } + + private boolean isNotAlreadySynchronized(List records, String signature) { + return getLastOperation(records, signature) + .map(operation -> operation == OrcidOperation.DELETE) + .orElse(Boolean.TRUE); + } + + private boolean isAlreadyDeleted(List records, OrcidHistory historyRecord) { + + if (historyRecord.getOperation() == OrcidOperation.DELETE) { + return true; + } + + return findDeletedHistoryRecordsBySignature(records, historyRecord.getMetadata()) + .anyMatch(record -> record.getTimestamp().after(historyRecord.getTimestamp())); + } + + private Stream findDeletedHistoryRecordsBySignature(List records, String signature) { + return records.stream() + .filter(record -> signature.equals(record.getMetadata())) + .filter(record -> record.getOperation() == OrcidOperation.DELETE); + } + + private Optional getLastOperation(List records, String signature) { + return 
records.stream() + .filter(record -> signature.equals(record.getMetadata())) + .sorted(comparing(OrcidHistory::getTimestamp, nullsFirst(naturalOrder())).reversed()) + .map(OrcidHistory::getOperation) + .findFirst(); + } + + private boolean isAlreadyQueued(Context context, Item profileItem, Item entity) throws SQLException { + return isNotEmpty(orcidQueueService.findByProfileItemAndEntity(context, profileItem, entity)); + } + + private boolean isNotLinkedToOrcid(Context context, Item profileItemItem) { + return hasNotOrcidAccessToken(context, profileItemItem) + || getMetadataValue(profileItemItem, "person.identifier.orcid") == null; + } + + private boolean hasNotOrcidAccessToken(Context context, Item profileItemItem) { + return orcidTokenService.findByProfileItem(context, profileItemItem) == null; + } + + private boolean shouldNotBeSynchronized(Item profileItem, Item entity) { + return !orcidSynchronizationService.isSynchronizationAllowed(profileItem, entity); + } + + private boolean isNotProfileItem(Item profileItemItem) { + return !getProfileType().equals(itemService.getEntityTypeLabel(profileItemItem)); + } + + private String getMetadataValue(Item item, String metadataField) { + return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY); + } + + private List getAllProfileSectionFactories(Item item) { + return this.profileSectionFactoryService.findByPreferences(asList(OrcidProfileSyncPreference.values())); + } + + private String getProfileType() { + return configurationService.getProperty("researcher-profile.entity-type", "Person"); + } + + private boolean isOrcidSynchronizationDisabled() { + return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); + } + + @Override + public void end(Context context) throws Exception { + alreadyConsumedItems.clear(); + } + + @Override + public void finish(Context context) throws Exception { + // nothing to do + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidHistoryDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidHistoryDAO.java new file mode 100644 index 000000000000..9e82f3c51dee --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidHistoryDAO.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.orcid.OrcidHistory; + +/** + * Database Access Object interface class for the OrcidHistory object. The + * implementation of this class is responsible for all database calls for the + * OrcidHistory object and is autowired by spring. This class should only be + * accessed from a single service and should never be exposed outside of the API + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidHistoryDAO extends GenericDAO { + + /** + * Find all the ORCID history records by the given profileItem and entity uuids. 
+ * + * @param context the DSpace context + * @param profileItemId the profileItem item uuid + * @param entityId the entity item uuid + * @return the records list + * @throws SQLException if an SQL error occurs + */ + List findByProfileItemAndEntity(Context context, UUID profileItemId, UUID entityId) + throws SQLException; + + /** + * Get the OrcidHistory records where the given item is the profileItem or the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidHistory entities + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Find the OrcidHistory records related to the given entity item. + * + * @param context DSpace context object + * @param entity the entity item + * @return the found put codes + * @throws SQLException if database error + */ + List findByEntity(Context context, Item entity) throws SQLException; + + /** + * Find all the successfully Orcid history records with the given record type + * related to the given entity. An history record is considered successful if + * the status is between 200 and 300. 
+ * + * @param context DSpace context object + * @param entity the entity item + * @param recordType the record type + * @return the found orcid history records + * @throws SQLException if database error + */ + List findSuccessfullyRecordsByEntityAndType(Context context, Item entity, + String recordType) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java new file mode 100644 index 000000000000..235443b15033 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidQueueDAO.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.orcid.OrcidQueue; + +/** + * Database Access Object interface class for the OrcidQueue object. The + * implementation of this class is responsible for all database calls for the + * OrcidQueue object and is autowired by spring. This class should only be + * accessed from a single service and should never be exposed outside of the API + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidQueueDAO extends GenericDAO { + + /** + * Get the orcid queue records by the profileItem id. 
+ * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @param limit limit + * @param offset offset + * @return the orcid queue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException; + + /** + * Count the orcid queue records with the same profileItemId. + * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @return the count result + * @throws SQLException if an SQL error occurs + */ + long countByProfileItemId(Context context, UUID profileItemId) throws SQLException; + + /** + * Returns all the orcid queue records with the given profileItem and entity + * items. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the found orcid queue records + * @throws SQLException + */ + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException; + + /** + * Get the OrcidQueue records where the given item is the profileItem OR the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidHistory entities + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Find all the OrcidQueue records with the given entity and record type. + * + * @param context DSpace context object + * @param entity the entity item + * @param type the record type + * @throws SQLException if database error occurs + */ + public List findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException; + + /** + * Find all the OrcidQueue records with the given profileItem and record type. 
+ * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param type the record type + * @throws SQLException if database error occurs + */ + public List findByProfileItemAndRecordType(Context context, Item profileItem, String type) + throws SQLException; + + /** + * Get all the OrcidQueue records with attempts less than the given attempts. + * + * @param context DSpace context object + * @param attempts the maximum value of attempts + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidTokenDAO.java b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidTokenDAO.java new file mode 100644 index 000000000000..00ec3dd2747e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/OrcidTokenDAO.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; + +/** + * Database Access Object interface class for the OrcidToken object. The + * implementation of this class is responsible for all database calls for the + * OrcidToken object and is autowired by spring. This class should only be + * accessed from a single service and should never be exposed outside of the API + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidTokenDAO extends GenericDAO { + + /** + * Find an OrcidToken by ePerson. 
+ * + * @param context the DSpace context + * @param ePerson the ePerson to search for + * @return the Orcid token, if any + */ + public OrcidToken findByEPerson(Context context, EPerson ePerson); + + /** + * Find an OrcidToken by profileItem. + * + * @param context the DSpace context + * @param profileItem the profile item to search for + * @return the Orcid token, if any + */ + public OrcidToken findByProfileItem(Context context, Item profileItem); + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidHistoryDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidHistoryDAOImpl.java new file mode 100644 index 000000000000..0b2c7099ffac --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidHistoryDAOImpl.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao.impl; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import javax.persistence.Query; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.dao.OrcidHistoryDAO; + +/** + * Implementation of {@link OrcidHistoryDAO}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SuppressWarnings("unchecked") +public class OrcidHistoryDAOImpl extends AbstractHibernateDAO implements OrcidHistoryDAO { + + @Override + public List findByProfileItemAndEntity(Context context, UUID profileItemId, UUID entityId) + throws SQLException { + Query query = createQuery(context, + "FROM OrcidHistory WHERE profileItem.id = :profileItemId AND entity.id = :entityId "); + query.setParameter("profileItemId", profileItemId); + query.setParameter("entityId", entityId); + return query.getResultList(); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM OrcidHistory WHERE profileItem.id = :itemId OR entity.id = :itemId"); + query.setParameter("itemId", item.getID()); + return query.getResultList(); + } + + @Override + public List findByEntity(Context context, Item entity) throws SQLException { + Query query = createQuery(context, "FROM OrcidHistory WHERE entity.id = :entityId "); + query.setParameter("entityId", entity.getID()); + return query.getResultList(); + } + + @Override + public List findSuccessfullyRecordsByEntityAndType(Context context, Item entity, + String recordType) throws SQLException { + Query query = createQuery(context, "FROM OrcidHistory WHERE entity = :entity AND recordType = :type " + + "AND status BETWEEN 200 AND 300"); + query.setParameter("entity", entity); + query.setParameter("type", recordType); + return query.getResultList(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java new file mode 100644 index 000000000000..2114b2535759 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidQueueDAOImpl.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and 
NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao.impl; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import javax.persistence.Query; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.dao.OrcidQueueDAO; + +/** + * Implementation of {@link OrcidQueueDAO}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@SuppressWarnings("unchecked") +public class OrcidQueueDAOImpl extends AbstractHibernateDAO implements OrcidQueueDAO { + + @Override + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE profileItem.id= :profileItemId"); + query.setParameter("profileItemId", profileItemId); + if (limit != null && limit.intValue() > 0) { + query.setMaxResults(limit); + } + query.setFirstResult(offset); + return query.getResultList(); + } + + @Override + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE profileItem = :profileItem AND entity = :entity"); + query.setParameter("profileItem", profileItem); + query.setParameter("entity", entity); + return query.getResultList(); + } + + @Override + public long countByProfileItemId(Context context, UUID profileItemId) throws SQLException { + Query query = createQuery(context, + "SELECT COUNT(queue) FROM OrcidQueue queue WHERE profileItem.id= :profileItemId"); + query.setParameter("profileItemId", profileItemId); + return (long) query.getSingleResult(); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue 
WHERE profileItem.id= :itemId OR entity.id = :itemId"); + query.setParameter("itemId", item.getID()); + return query.getResultList(); + } + + @Override + public List findByEntityAndRecordType(Context context, Item entity, String type) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE entity = :entity AND recordType = :type"); + query.setParameter("entity", entity); + query.setParameter("type", type); + return query.getResultList(); + } + + @Override + public List findByProfileItemAndRecordType(Context context, Item profileItem, String type) + throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE profileItem = :profileItem AND recordType = :type"); + query.setParameter("profileItem", profileItem); + query.setParameter("type", type); + return query.getResultList(); + } + + @Override + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException { + Query query = createQuery(context, "FROM OrcidQueue WHERE attempts IS NULL OR attempts < :attempts"); + query.setParameter("attempts", attempts); + return query.getResultList(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidTokenDAOImpl.java b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidTokenDAOImpl.java new file mode 100644 index 000000000000..01b03fc35455 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/dao/impl/OrcidTokenDAOImpl.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.dao.impl; + +import java.sql.SQLException; +import javax.persistence.Query; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; +import 
org.dspace.orcid.dao.OrcidTokenDAO; + +/** + * Implementation of {@link OrcidTokenDAO}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidTokenDAOImpl extends AbstractHibernateDAO implements OrcidTokenDAO { + + @Override + public OrcidToken findByEPerson(Context context, EPerson ePerson) { + try { + Query query = createQuery(context, "FROM OrcidToken WHERE ePerson = :ePerson"); + query.setParameter("ePerson", ePerson); + return singleResult(query); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public OrcidToken findByProfileItem(Context context, Item profileItem) { + try { + Query query = createQuery(context, "FROM OrcidToken WHERE profileItem = :profileItem"); + query.setParameter("profileItem", profileItem); + return singleResult(query); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidClientException.java b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidClientException.java new file mode 100644 index 000000000000..9e78ef07b0c5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidClientException.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.exception; + +/** + * Exception throwable from class that implements {@link OrcidClient} in case of + * error response from the ORCID registry. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidClientException extends RuntimeException { + + public static final String INVALID_GRANT_MESSAGE = "invalid_grant"; + + private static final long serialVersionUID = -7618061110212398216L; + + private int status = 0; + + public OrcidClientException(int status, String content) { + super(content); + this.status = status; + } + + public OrcidClientException(Throwable cause) { + super(cause); + } + + public int getStatus() { + return this.status; + } + + /** + * Returns true if the exception is related to an invalid grant error + * (authentication code non valid), false otherwise + * + * @return the check result + */ + public boolean isInvalidGrantException() { + return getMessage() != null && getMessage().contains(INVALID_GRANT_MESSAGE); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidValidationException.java b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidValidationException.java new file mode 100644 index 000000000000..bb35789ab951 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/exception/OrcidValidationException.java @@ -0,0 +1,52 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.exception; + +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.orcid.model.validator.OrcidValidationError; + +/** + * A Runtime exception that occurs when an ORCID object that must be send to + * ORCID is not valid. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidValidationException extends RuntimeException { + + private static final long serialVersionUID = 3377335341871311369L; + + private final List errors; + + public OrcidValidationException(OrcidValidationError error) { + this(List.of(error)); + } + + public OrcidValidationException(List errors) { + super("Errors occurs during ORCID object validation"); + this.errors = errors; + } + + public List getErrors() { + return errors; + } + + @Override + public String getMessage() { + return super.getMessage() + ". Error codes: " + formatErrors(); + } + + private String formatErrors() { + return errors.stream() + .map(error -> error.getCode()) + .collect(Collectors.joining(",")); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactory.java b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactory.java new file mode 100644 index 000000000000..09f43229d642 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactory.java @@ -0,0 +1,54 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.factory; + +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the orcid 
package, use + * OrcidHistoryServiceFactory.getInstance() to retrieve an implementation. + * + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public abstract class OrcidServiceFactory { + + public abstract OrcidHistoryService getOrcidHistoryService(); + + public abstract OrcidQueueService getOrcidQueueService(); + + public abstract OrcidSynchronizationService getOrcidSynchronizationService(); + + public abstract OrcidTokenService getOrcidTokenService(); + + public abstract OrcidProfileSectionFactoryService getOrcidProfileSectionFactoryService(); + + public abstract MetadataSignatureGenerator getMetadataSignatureGenerator(); + + public abstract OrcidEntityFactoryService getOrcidEntityFactoryService(); + + public abstract OrcidClient getOrcidClient(); + + public abstract OrcidConfiguration getOrcidConfiguration(); + + public static OrcidServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName( + "orcidServiceFactory", OrcidServiceFactory.class); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactoryImpl.java new file mode 100644 index 000000000000..78972eba85f9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/factory/OrcidServiceFactoryImpl.java @@ -0,0 +1,105 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.factory; + +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; 
+import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidServiceFactory}. + * + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public class OrcidServiceFactoryImpl extends OrcidServiceFactory { + + @Autowired + private OrcidHistoryService orcidHistoryService; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired + private OrcidQueueService orcidQueueService; + + @Autowired + private OrcidProfileSectionFactoryService orcidProfileSectionFactoryService; + + @Autowired + private OrcidEntityFactoryService orcidEntityFactoryService; + + @Autowired + private MetadataSignatureGenerator metadataSignatureGenerator; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public OrcidHistoryService getOrcidHistoryService() { + return orcidHistoryService; + } + + @Override + public OrcidQueueService getOrcidQueueService() { + return orcidQueueService; + } + + @Override + public OrcidSynchronizationService getOrcidSynchronizationService() { + return orcidSynchronizationService; + } + + @Override + public OrcidProfileSectionFactoryService getOrcidProfileSectionFactoryService() { + return orcidProfileSectionFactoryService; + } + + @Override + public MetadataSignatureGenerator getMetadataSignatureGenerator() { + return metadataSignatureGenerator; + } + + @Override + public OrcidEntityFactoryService getOrcidEntityFactoryService() { + return orcidEntityFactoryService; + } + + @Override + public OrcidTokenService getOrcidTokenService() { + return orcidTokenService; + } + + @Override + public OrcidClient getOrcidClient() { + return orcidClient; + } + + @Override + public 
OrcidConfiguration getOrcidConfiguration() { + return orcidConfiguration; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidEntityType.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidEntityType.java new file mode 100644 index 000000000000..6b32818f7673 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidEntityType.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import java.util.Arrays; + +/** + * The types of activities defined on ORCID that can be synchronized. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidEntityType { + + /** + * The ORCID publication/work activity. + */ + PUBLICATION("Publication", "/work"), + + /** + * The ORCID funding activity. + */ + FUNDING("Project", "/funding"); + + /** + * The DSpace entity type. + */ + private final String entityType; + + /** + * The subpath of the activity on ORCID API. + */ + private final String path; + + private OrcidEntityType(String entityType, String path) { + this.entityType = entityType; + this.path = path; + } + + public String getEntityType() { + return entityType; + } + + public String getPath() { + return path; + } + + /** + * Check if the given DSpace entity type is valid. + * @param entityType the entity type to check + * @return true if valid, false otherwise + */ + public static boolean isValidEntityType(String entityType) { + return Arrays.stream(OrcidEntityType.values()) + .anyMatch(orcidEntityType -> orcidEntityType.getEntityType().equalsIgnoreCase(entityType)); + } + + /** + * Returns an ORCID entity type from a DSpace entity type. 
+ * + * @param entityType the DSpace entity type to search for + * @return the ORCID entity type, if any + */ + public static OrcidEntityType fromEntityType(String entityType) { + return Arrays.stream(OrcidEntityType.values()) + .filter(orcidEntityType -> orcidEntityType.getEntityType().equalsIgnoreCase(entityType)) + .findFirst() + .orElse(null); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidFundingFieldMapping.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidFundingFieldMapping.java new file mode 100644 index 000000000000..1a8333058a1e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidFundingFieldMapping.java @@ -0,0 +1,209 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.dspace.orcid.model.factory.OrcidFactoryUtils; +import org.dspace.util.SimpleMapConverter; +import org.orcid.jaxb.model.common.FundingContributorRole; + +/** + * Class that contains all the mapping between {@link Funding} and DSpace + * metadata fields. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidFundingFieldMapping { + + /** + * The metadata fields related to the funding contributors. + */ + private Map contributorFields; + + /** + * The metadata fields related to the funding external identifiers. + */ + private Map externalIdentifierFields; + + /** + * The metadata field related to the funding title. 
+ */ + private String titleField; + + /** + * The metadata field related to the funding type. + */ + private String typeField; + + /** + * The funding type converter. + */ + private SimpleMapConverter typeConverter; + + /** + * The metadata field related to the funding amount. + */ + private String amountField; + + /** + * The metadata field related to the funding amount's currency. + */ + private String amountCurrencyField; + + /** + * The funding amount's currency converter. + */ + private SimpleMapConverter amountCurrencyConverter; + + /** + * The metadata field related to the funding start date. + */ + private String startDateField; + + /** + * The metadata field related to the funding end date. + */ + private String endDateField; + + /** + * The metadata field related to the funding description. + */ + private String descriptionField; + + /** + * The type of the relationship between the funding and the organization. + */ + private String organizationRelationshipType; + + private Map parseContributors(String contributors) { + Map contributorsMap = parseConfigurations(contributors); + return contributorsMap.keySet().stream() + .collect(toMap(identity(), field -> parseContributorRole(contributorsMap.get(field)))); + } + + private FundingContributorRole parseContributorRole(String contributorRole) { + try { + return FundingContributorRole.fromValue(contributorRole); + } catch (IllegalArgumentException ex) { + throw new IllegalArgumentException("The funding contributor role " + contributorRole + + " is invalid, allowed values are " + getAllowedContributorRoles(), ex); + } + } + + private List getAllowedContributorRoles() { + return Arrays.asList(FundingContributorRole.values()).stream() + .map(FundingContributorRole::value) + .collect(Collectors.toList()); + } + + public Map getExternalIdentifierFields() { + return externalIdentifierFields; + } + + public void setExternalIdentifierFields(String externalIdentifierFields) { + this.externalIdentifierFields = 
OrcidFactoryUtils.parseConfigurations(externalIdentifierFields); + } + + public Map getContributorFields() { + return contributorFields; + } + + public void setContributorFields(String contributorFields) { + this.contributorFields = parseContributors(contributorFields); + } + + public String getTitleField() { + return titleField; + } + + public void setTitleField(String titleField) { + this.titleField = titleField; + } + + public String getStartDateField() { + return startDateField; + } + + public void setStartDateField(String startDateField) { + this.startDateField = startDateField; + } + + public String getEndDateField() { + return endDateField; + } + + public void setEndDateField(String endDateField) { + this.endDateField = endDateField; + } + + public String getDescriptionField() { + return descriptionField; + } + + public void setDescriptionField(String descriptionField) { + this.descriptionField = descriptionField; + } + + public String getOrganizationRelationshipType() { + return organizationRelationshipType; + } + + public void setOrganizationRelationshipType(String organizationRelationshipType) { + this.organizationRelationshipType = organizationRelationshipType; + } + + public String getTypeField() { + return typeField; + } + + public void setTypeField(String typeField) { + this.typeField = typeField; + } + + public String getAmountField() { + return amountField; + } + + public void setAmountField(String amountField) { + this.amountField = amountField; + } + + public String getAmountCurrencyField() { + return amountCurrencyField; + } + + public void setAmountCurrencyField(String amountCurrencyField) { + this.amountCurrencyField = amountCurrencyField; + } + + public String convertAmountCurrency(String currency) { + return amountCurrencyConverter != null ? 
amountCurrencyConverter.getValue(currency) : currency; + } + + public void setAmountCurrencyConverter(SimpleMapConverter amountCurrencyConverter) { + this.amountCurrencyConverter = amountCurrencyConverter; + } + + public String convertType(String type) { + return typeConverter != null ? typeConverter.getValue(type) : type; + } + + public void setTypeConverter(SimpleMapConverter typeConverter) { + this.typeConverter = typeConverter; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidProfileSectionType.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidProfileSectionType.java new file mode 100644 index 000000000000..7521844d2db1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidProfileSectionType.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import org.apache.commons.lang3.EnumUtils; + +/** + * Enum that model all the ORCID profile sections that could be synchronized. + * These fields come from the ORCID PERSON schema, see + * https://info.orcid.org/documentation/integration-guide/orcid-record/#PERSON + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidProfileSectionType { + + OTHER_NAMES("/other-names"), + COUNTRY("/address"), + KEYWORDS("/keywords"), + EXTERNAL_IDS("/external-identifiers"), + RESEARCHER_URLS("/researcher-urls"); + + private final String path; + + private OrcidProfileSectionType(String path) { + this.path = path; + } + + public String getPath() { + return path; + } + + public static boolean isValid(String type) { + return type != null ? EnumUtils.isValidEnum(OrcidProfileSectionType.class, type.toUpperCase()) : false; + } + + public static OrcidProfileSectionType fromString(String type) { + return isValid(type) ? 
OrcidProfileSectionType.valueOf(type.toUpperCase()) : null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidTokenResponseDTO.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidTokenResponseDTO.java new file mode 100644 index 000000000000..6b3594f9b814 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidTokenResponseDTO.java @@ -0,0 +1,135 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.commons.lang3.StringUtils; + +/** + * This class map the response from and ORCID token endpoint. + * + * Response example: + * + * { + * "access_token":"f5af9f51-07e6-4332-8f1a-c0c11c1e3728", + * "token_type":"bearer", + * "refresh_token":"f725f747-3a65-49f6-a231-3e8944ce464d", + * "expires_in":631138518, + * "scope":"/read-limited", + * "name":"Sofia Garcia", + * "orcid":"0000-0001-2345-6789" + * } + * + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public class OrcidTokenResponseDTO { + + /** + * The access token release by the authorization server this is the most + * relevant item, because it allow the server to access to the user resources as + * defined in the scopes. + */ + @JsonProperty("access_token") + private String accessToken; + + /** + * The refresh token as defined in the OAuth standard. + */ + @JsonProperty("refresh_token") + private String refreshToken; + + /** + * It will be "bearer". + */ + @JsonProperty("token_type") + private String tokenType; + + /** + * The expiration timestamp in millis. + */ + @JsonProperty("expires_in") + private int expiresIn; + + /** + * List of scopes. + */ + private String scope; + + /** + * The ORCID user name. 
+ */ + private String name; + + /** + * The ORCID user id. + */ + private String orcid; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getOrcid() { + return orcid; + } + + public void setOrcid(String orcid) { + this.orcid = orcid; + } + + public String getAccessToken() { + return accessToken; + } + + public void setAccessToken(String accessToken) { + this.accessToken = accessToken; + } + + public String getRefreshToken() { + return refreshToken; + } + + public void setRefreshToken(String refreshToken) { + this.refreshToken = refreshToken; + } + + public String getTokenType() { + return tokenType; + } + + public void setTokenType(String tokenType) { + this.tokenType = tokenType; + } + + public int getExpiresIn() { + return expiresIn; + } + + public void setExpiresIn(int expiresIn) { + this.expiresIn = expiresIn; + } + + public String getScope() { + return scope; + } + + public void setScope(String scope) { + this.scope = scope; + } + + @JsonIgnore + public String[] getScopeAsArray() { + return StringUtils.isEmpty(getScope()) ? 
new String[] {} : getScope().split(" "); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/OrcidWorkFieldMapping.java b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidWorkFieldMapping.java new file mode 100644 index 000000000000..781a9dcbd904 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/OrcidWorkFieldMapping.java @@ -0,0 +1,197 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model; + +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.dspace.util.SimpleMapConverter; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.v3.release.record.Work; + +/** + * Class that contains all the mapping between {@link Work} and DSpace metadata + * fields. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidWorkFieldMapping { + + /** + * The metadata fields related to the work contributors. + */ + private Map contributorFields = new HashMap<>(); + + /** + * The metadata fields related to the work external identifiers. + */ + private Map externalIdentifierFields = new HashMap<>(); + + /** + * The metadata field related to the work publication date. + */ + private String publicationDateField; + + /** + * The metadata field related to the work title. + */ + private String titleField; + + /** + * The metadata field related to the work type. + */ + private String typeField; + + /** + * The metadata field related to the work journal title. 
+ */ + private String journalTitleField; + + /** + * The metadata field related to the work description. + */ + private String shortDescriptionField; + + /** + * The metadata field related to the work language. + */ + private String languageField; + + /** + * The metadata field related to the work sub title. + */ + private String subTitleField; + + /** + * The work type converter. + */ + private SimpleMapConverter typeConverter; + + /** + * The work language converter. + */ + private SimpleMapConverter languageConverter; + + public String convertType(String type) { + return typeConverter != null ? typeConverter.getValue(type) : type; + } + + public String convertLanguage(String language) { + return languageConverter != null ? languageConverter.getValue(language) : language; + } + + public String getTitleField() { + return titleField; + } + + public void setTitleField(String titleField) { + this.titleField = titleField; + } + + public String getTypeField() { + return typeField; + } + + public void setTypeField(String typeField) { + this.typeField = typeField; + } + + public void setTypeConverter(SimpleMapConverter typeConverter) { + this.typeConverter = typeConverter; + } + + public Map getContributorFields() { + return contributorFields; + } + + public void setContributorFields(String contributorFields) { + this.contributorFields = parseContributors(contributorFields); + } + + public Map getExternalIdentifierFields() { + return externalIdentifierFields; + } + + public void setExternalIdentifierFields(String externalIdentifierFields) { + this.externalIdentifierFields = parseConfigurations(externalIdentifierFields); + } + + public String getPublicationDateField() { + return publicationDateField; + } + + public void setPublicationDateField(String publicationDateField) { + this.publicationDateField = publicationDateField; + } + + public String getJournalTitleField() { + return journalTitleField; + } + + public void setJournalTitleField(String journalTitleField) { + 
this.journalTitleField = journalTitleField; + } + + public String getShortDescriptionField() { + return shortDescriptionField; + } + + public void setShortDescriptionField(String shortDescriptionField) { + this.shortDescriptionField = shortDescriptionField; + } + + public String getLanguageField() { + return languageField; + } + + public void setLanguageField(String languageField) { + this.languageField = languageField; + } + + public void setLanguageConverter(SimpleMapConverter languageConverter) { + this.languageConverter = languageConverter; + } + + public String getSubTitleField() { + return subTitleField; + } + + public void setSubTitleField(String subTitleField) { + this.subTitleField = subTitleField; + } + + private Map parseContributors(String contributors) { + Map contributorsMap = parseConfigurations(contributors); + return contributorsMap.keySet().stream() + .collect(toMap(identity(), field -> parseContributorRole(contributorsMap.get(field)))); + } + + private ContributorRole parseContributorRole(String contributorRole) { + try { + return ContributorRole.fromValue(contributorRole); + } catch (IllegalArgumentException ex) { + throw new IllegalArgumentException("The contributor role " + contributorRole + + " is invalid, allowed values are " + getAllowedContributorRoles(), ex); + } + } + + private List getAllowedContributorRoles() { + return Arrays.asList(ContributorRole.values()).stream() + .map(ContributorRole::value) + .collect(Collectors.toList()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidCommonObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidCommonObjectFactory.java new file mode 100644 index 000000000000..4ca36c216919 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidCommonObjectFactory.java @@ -0,0 +1,93 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import java.util.Optional; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.exception.OrcidValidationException; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.Country; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; + +/** + * Interface for factory classes that creates common ORCID objects. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidCommonObjectFactory { + + /** + * Creates an instance of {@link FuzzyDate} if the given metadata value + * represent a date with a supported format. + * + * @param metadataValue the metadata value + * @return the FuzzyDate istance, if any + */ + public Optional createFuzzyDate(MetadataValue metadataValue); + + /** + * Creates an instance of {@link Organization} from the given orgUnit item. + * + * @param context the DSpace context + * @param orgUnit the orgUnit item + * @return the created Organization's instance, if any + */ + public Optional createOrganization(Context context, Item orgUnit); + + /** + * Creates an instance of {@link Contributor} from the given metadata value. 
+ * + * @param context the DSpace context + * @param metadataValue the metadata value + * @param role the contributor role + * @return the created Contributor instance, if any + */ + public Optional createContributor(Context context, MetadataValue metadataValue, ContributorRole role); + + /** + * Creates an instance of {@link FundingContributor} from the given metadata + * value. + * + * @param context the DSpace context + * @param metadataValue the metadata value + * @param role the contributor role + * @return the created FundingContributor instance, if any + */ + public Optional createFundingContributor(Context context, MetadataValue metadataValue, + FundingContributorRole role); + + /** + * Creates an instance of {@link Url} from the given item. + * @param context the DSpace context + * @param item the item + * @return the created Url instance, if any + */ + public Optional createUrl(Context context, Item item); + + /** + * Creates an instance of {@link Country} from the given metadata value. 
+ * + * @param context the DSpace context + * @param metadataValue the metadata value + * @return the created Country instance, if any + * @throws OrcidValidationException if the given metadata value is not a valid + * ISO 3611 country + */ + public Optional createCountry(Context context, MetadataValue metadataValue) + throws OrcidValidationException; + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidEntityFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidEntityFactory.java new file mode 100644 index 000000000000..3fbad15911cd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidEntityFactory.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.orcid.jaxb.model.v3.release.record.Activity; + +/** + * Interface to mark factories of Orcid entities. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidEntityFactory { + + /** + * Placeholder used to refer the item handle on fields mapping. + */ + String SIMPLE_HANDLE_PLACEHOLDER = "$simple-handle"; + + /** + * Returns the entity type created from this factory. + * + * @return the entity type + */ + public OrcidEntityType getEntityType(); + + /** + * Creates an ORCID activity from the given object. 
+ * + * @param context the DSpace context + * @param item the item + * @return the created activity instance + */ + public Activity createOrcidObject(Context context, Item item); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidFactoryUtils.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidFactoryUtils.java new file mode 100644 index 000000000000..4b8c1178efeb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidFactoryUtils.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; + +/** + * Utility class for Orcid factory classes. This is used to parse the + * configuration of ORCID entities defined in orcid.cfg (for example see + * contributors and external ids configuration). + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class OrcidFactoryUtils { + + private OrcidFactoryUtils() { + + } + + /** + * Parse the given configurations value and returns a map with metadata fields + * as keys and types/sources as values. The expected configuration syntax is a + * list of values field::type separated by commas. 
+ * + * @param configurations the configurations to parse + * @return the configurations parsing result as map + */ + public static Map parseConfigurations(String configurations) { + Map configurationMap = new HashMap(); + if (StringUtils.isBlank(configurations)) { + return configurationMap; + } + + for (String configuration : configurations.split(",")) { + String[] configurationSections = parseConfiguration(configuration); + configurationMap.put(configurationSections[0], configurationSections[1]); + } + + return configurationMap; + } + + /** + * Parse the given configuration value and returns it's section. The expected + * configuration syntax is field::type. + * + * @param configuration the configuration to parse + * @return the configuration sections + * @throws IllegalStateException if the given configuration is not valid + */ + private static String[] parseConfiguration(String configuration) { + String[] configurations = configuration.split("::"); + if (configurations.length != 2) { + throw new IllegalStateException( + "The configuration '" + configuration + "' is not valid. 
Expected field::type"); + } + return configurations; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidProfileSectionFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidProfileSectionFactory.java new file mode 100644 index 000000000000..731b6f84a336 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/OrcidProfileSectionFactory.java @@ -0,0 +1,78 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.profile.OrcidProfileSyncPreference; + +/** + * Interface for classes that creates ORCID profile section object. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidProfileSectionFactory { + + /** + * Creates an instance of an ORCID object starting from the metadata values + * + * @param context the DSpace Context + * @param metadataValues the metadata values + * @return the ORCID object + */ + public Object create(Context context, List metadataValues); + + /** + * Returns the profile section type related to this factory. + * + * @return the profile section type + */ + public OrcidProfileSectionType getProfileSectionType(); + + /** + * Returns the profile synchronization preference related to this factory. + * + * @return the synchronization preference + */ + public OrcidProfileSyncPreference getSynchronizationPreference(); + + /** + * Returns all the metadata fields involved in the profile section + * configuration. 
+ * + * @return the metadataFields + */ + public List getMetadataFields(); + + /** + * Given the input item's metadata values generate a metadata signature for each + * metadata field groups handled by this factory or for each metadata fields if + * the factory is configured with single metadata fields. + * + * @param context the DSpace context + * @param item the item + * @return the metadata signatures + */ + public List getMetadataSignatures(Context context, Item item); + + /** + * Returns a description of the item's metadata values related to the given + * signature. + * + * @param context the DSpace context + * @param item the item + * @param signature the metadata signature + * @return the metadata values description + */ + public String getDescription(Context context, Item item, String signature); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java new file mode 100644 index 000000000000..2c272e620cca --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/AbstractOrcidProfileSectionFactory.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static java.lang.String.format; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.profile.OrcidProfileSyncPreference; 
+import org.springframework.beans.factory.annotation.Autowired; + +/** + * Abstract class for that handle commons behaviors of all the available orcid + * profile section factories. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public abstract class AbstractOrcidProfileSectionFactory implements OrcidProfileSectionFactory { + + protected final OrcidProfileSectionType sectionType; + + protected final OrcidProfileSyncPreference preference; + + @Autowired + protected ItemService itemService; + + @Autowired + protected OrcidCommonObjectFactory orcidCommonObjectFactory; + + @Autowired + protected MetadataSignatureGenerator metadataSignatureGenerator; + + public AbstractOrcidProfileSectionFactory(OrcidProfileSectionType sectionType, + OrcidProfileSyncPreference preference) { + this.sectionType = sectionType; + this.preference = preference; + + if (!getSupportedTypes().contains(sectionType)) { + throw new IllegalArgumentException(format("The ORCID configuration does not support " + + "the section type %s. 
Supported types are %s", sectionType, getSupportedTypes())); + } + } + + protected abstract List getSupportedTypes(); + + @Override + public OrcidProfileSectionType getProfileSectionType() { + return sectionType; + } + + @Override + public OrcidProfileSyncPreference getSynchronizationPreference() { + return preference; + } + + protected List getMetadataValues(Item item, String metadataField) { + return itemService.getMetadataByMetadataString(item, metadataField); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java new file mode 100644 index 000000000000..2f47aa53d69d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidCommonObjectFactoryImpl.java @@ -0,0 +1,308 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static java.util.Optional.empty; +import static java.util.Optional.of; +import static java.util.Optional.ofNullable; +import static org.apache.commons.lang3.EnumUtils.isValidEnum; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; +import static org.orcid.jaxb.model.common.SequenceType.ADDITIONAL; +import static org.orcid.jaxb.model.common.SequenceType.FIRST; + +import java.time.LocalDate; +import java.time.ZoneId; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; 
+import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.exception.OrcidValidationException; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.validator.OrcidValidationError; +import org.dspace.util.MultiFormatDateParser; +import org.dspace.util.SimpleMapConverter; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.common.Iso3166Country; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.ContributorAttributes; +import org.orcid.jaxb.model.v3.release.common.Country; +import org.orcid.jaxb.model.v3.release.common.CreditName; +import org.orcid.jaxb.model.v3.release.common.DisambiguatedOrganization; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.OrganizationAddress; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; +import org.orcid.jaxb.model.v3.release.record.FundingContributorAttributes; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidCommonObjectFactory}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidCommonObjectFactoryImpl implements OrcidCommonObjectFactory { + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidConfiguration orcidConfiguration; + + @Autowired + private HandleService handleService; + + private SimpleMapConverter countryConverter; + + private String organizationTitleField; + + private String organizationCityField; + + private String organizationCountryField; + + private String contributorEmailField; + + private String contributorOrcidField; + + private Map disambiguatedOrganizationIdentifierFields = new HashMap<>(); + + @Override + public Optional createFuzzyDate(MetadataValue metadataValue) { + + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + Date date = MultiFormatDateParser.parse(metadataValue.getValue()); + if (date == null) { + return empty(); + } + + LocalDate localDate = convertToLocalDate(date); + return of(FuzzyDate.valueOf(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth())); + } + + @Override + public Optional createOrganization(Context context, Item orgUnit) { + + if (orgUnit == null) { + return Optional.empty(); + } + + Organization organization = new Organization(); + + organization.setName(getMetadataValue(orgUnit, organizationTitleField)); + organization.setAddress(createOrganizationAddress(orgUnit)); + organization.setDisambiguatedOrganization(createDisambiguatedOrganization(orgUnit)); + + return of(organization); + } + + @Override + public Optional createContributor(Context context, MetadataValue metadataValue, ContributorRole role) { + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + Contributor contributor = new Contributor(); + contributor.setCreditName(new CreditName(metadataValue.getValue())); + contributor.setContributorAttributes(getContributorAttributes(metadataValue, role)); + + return of(contributor); + } + + @Override + public 
Optional createFundingContributor(Context context, MetadataValue metadataValue, + FundingContributorRole role) { + + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + FundingContributor contributor = new FundingContributor(); + contributor.setCreditName(new CreditName(metadataValue.getValue())); + contributor.setContributorAttributes(getFundingContributorAttributes(metadataValue, role)); + + return of(contributor); + } + + @Override + public Optional createUrl(Context context, Item item) { + String handle = item.getHandle(); + if (StringUtils.isBlank(handle)) { + return empty(); + } + + return of(new Url(handleService.getCanonicalForm(handle))); + } + + @Override + public Optional createCountry(Context context, MetadataValue metadataValue) { + + if (isUnprocessableValue(metadataValue)) { + return empty(); + } + + Optional country = convertToIso3166Country(metadataValue.getValue()); + + if (country.isEmpty()) { + throw new OrcidValidationException(OrcidValidationError.INVALID_COUNTRY); + } + + return country.map(isoCountry -> new Country(isoCountry)); + } + + private ContributorAttributes getContributorAttributes(MetadataValue metadataValue, ContributorRole role) { + ContributorAttributes attributes = new ContributorAttributes(); + attributes.setContributorRole(role != null ? role : null); + attributes.setContributorSequence(metadataValue.getPlace() == 0 ? 
FIRST : ADDITIONAL); + return attributes; + } + + private OrganizationAddress createOrganizationAddress(Item organizationItem) { + OrganizationAddress address = new OrganizationAddress(); + + address.setCity(getMetadataValue(organizationItem, organizationCityField)); + + convertToIso3166Country(getMetadataValue(organizationItem, organizationCountryField)) + .ifPresent(address::setCountry); + + return address; + } + + private FundingContributorAttributes getFundingContributorAttributes(MetadataValue metadataValue, + FundingContributorRole role) { + FundingContributorAttributes attributes = new FundingContributorAttributes(); + attributes.setContributorRole(role != null ? role : null); + return attributes; + } + + private DisambiguatedOrganization createDisambiguatedOrganization(Item organizationItem) { + + for (String identifierField : disambiguatedOrganizationIdentifierFields.keySet()) { + + String source = disambiguatedOrganizationIdentifierFields.get(identifierField); + String identifier = getMetadataValue(organizationItem, identifierField); + + if (isNotBlank(identifier)) { + DisambiguatedOrganization disambiguatedOrganization = new DisambiguatedOrganization(); + disambiguatedOrganization.setDisambiguatedOrganizationIdentifier(identifier); + disambiguatedOrganization.setDisambiguationSource(source); + return disambiguatedOrganization; + } + + } + + return null; + } + + private Optional convertToIso3166Country(String countryValue) { + return ofNullable(countryValue) + .map(value -> countryConverter != null ? 
countryConverter.getValue(value) : value) + .filter(value -> isValidEnum(Iso3166Country.class, value)) + .map(value -> Iso3166Country.fromValue(value)); + } + + private boolean isUnprocessableValue(MetadataValue value) { + return value == null || isBlank(value.getValue()); + } + + private String getMetadataValue(Item item, String metadataField) { + if (StringUtils.isNotBlank(metadataField)) { + return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY); + } else { + return null; + } + } + + private LocalDate convertToLocalDate(Date date) { + return date.toInstant().atZone(ZoneId.systemDefault()).toLocalDate(); + } + + public String getOrganizationCityField() { + return organizationCityField; + } + + public String getOrganizationCountryField() { + return organizationCountryField; + } + + public Map getDisambiguatedOrganizationIdentifierFields() { + return disambiguatedOrganizationIdentifierFields; + } + + public String getContributorEmailField() { + return contributorEmailField; + } + + public String getContributorOrcidField() { + return contributorOrcidField; + } + + public void setItemService(ItemService itemService) { + this.itemService = itemService; + } + + public OrcidConfiguration getOrcidConfiguration() { + return orcidConfiguration; + } + + public void setOrcidConfiguration(OrcidConfiguration orcidConfiguration) { + this.orcidConfiguration = orcidConfiguration; + } + + public void setOrganizationCityField(String organizationCityField) { + this.organizationCityField = organizationCityField; + } + + public void setOrganizationCountryField(String organizationCountryField) { + this.organizationCountryField = organizationCountryField; + } + + public void setContributorEmailField(String contributorEmailField) { + this.contributorEmailField = contributorEmailField; + } + + public void setContributorOrcidField(String contributorOrcidField) { + this.contributorOrcidField = contributorOrcidField; + } + + public void 
setDisambiguatedOrganizationIdentifierFields(String disambiguatedOrganizationIds) { + this.disambiguatedOrganizationIdentifierFields = parseConfigurations(disambiguatedOrganizationIds); + } + + public SimpleMapConverter getCountryConverter() { + return countryConverter; + } + + public void setCountryConverter(SimpleMapConverter countryConverter) { + this.countryConverter = countryConverter; + } + + public String getOrganizationTitleField() { + return organizationTitleField; + } + + public void setOrganizationTitleField(String organizationTitleField) { + this.organizationTitleField = organizationTitleField; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidFundingFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidFundingFactory.java new file mode 100644 index 000000000000..890b54f12b1c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidFundingFactory.java @@ -0,0 +1,301 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +import java.sql.SQLException; +import java.util.Collection; +import java.util.Currency; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.core.Context; +import 
org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidFundingFieldMapping; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.factory.OrcidEntityFactory; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.common.FundingType; +import org.orcid.jaxb.model.v3.release.common.Amount; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.Title; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.Activity; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; +import org.orcid.jaxb.model.v3.release.record.FundingContributors; +import org.orcid.jaxb.model.v3.release.record.FundingTitle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidEntityFactory} that creates instances of + * {@link Funding}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidFundingFactory implements OrcidEntityFactory { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidFundingFactory.class); + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidCommonObjectFactory orcidCommonObjectFactory; + + @Autowired + private RelationshipTypeService relationshipTypeService; + + @Autowired + private RelationshipService relationshipService; + + private OrcidFundingFieldMapping fieldMapping; + + @Override + public OrcidEntityType getEntityType() { + return OrcidEntityType.FUNDING; + } + + @Override + public Activity createOrcidObject(Context context, Item item) { + Funding funding = new Funding(); + funding.setContributors(getContributors(context, item)); + funding.setDescription(getDescription(context, item)); + funding.setEndDate(getEndDate(context, item)); + funding.setExternalIdentifiers(getExternalIds(context, item)); + funding.setOrganization(getOrganization(context, item)); + funding.setStartDate(getStartDate(context, item)); + funding.setTitle(getTitle(context, item)); + funding.setType(getType(context, item)); + funding.setUrl(getUrl(context, item)); + funding.setAmount(getAmount(context, item)); + return funding; + } + + private FundingContributors getContributors(Context context, Item item) { + FundingContributors fundingContributors = new FundingContributors(); + getMetadataValues(context, item, fieldMapping.getContributorFields().keySet()).stream() + .map(metadataValue -> getFundingContributor(context, metadataValue)) + .filter(Optional::isPresent) + .map(Optional::get) + .forEach(fundingContributors.getContributor()::add); + return fundingContributors; + } + + private Optional getFundingContributor(Context context, MetadataValue metadataValue) { + String metadataField = metadataValue.getMetadataField().toString('.'); + FundingContributorRole role = 
fieldMapping.getContributorFields().get(metadataField); + return orcidCommonObjectFactory.createFundingContributor(context, metadataValue, role); + } + + + private String getDescription(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getDescriptionField()) + .map(MetadataValue::getValue) + .orElse(null); + } + + private FuzzyDate getEndDate(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getEndDateField()) + .flatMap(metadataValue -> orcidCommonObjectFactory.createFuzzyDate(metadataValue)) + .orElse(null); + } + + private ExternalIDs getExternalIds(Context context, Item item) { + ExternalIDs externalIdentifiers = new ExternalIDs(); + + getMetadataValues(context, item, fieldMapping.getExternalIdentifierFields().keySet()).stream() + .map(this::getExternalId) + .forEach(externalIdentifiers.getExternalIdentifier()::add); + + return externalIdentifiers; + } + + private ExternalID getExternalId(MetadataValue metadataValue) { + String metadataField = metadataValue.getMetadataField().toString('.'); + return getExternalId(fieldMapping.getExternalIdentifierFields().get(metadataField), metadataValue.getValue()); + } + + private ExternalID getExternalId(String type, String value) { + ExternalID externalID = new ExternalID(); + externalID.setType(type); + externalID.setValue(value); + externalID.setRelationship(org.orcid.jaxb.model.common.Relationship.SELF); + return externalID; + } + + /** + * Returns an Organization ORCID entity related to the given item. The + * relationship type configured with + * orcid.mapping.funding.organization-relationship-type is the relationship used + * to search the Organization of the given project item. 
+ */ + private Organization getOrganization(Context context, Item item) { + + try { + + return relationshipTypeService.findByLeftwardOrRightwardTypeName(context, + fieldMapping.getOrganizationRelationshipType()).stream() + .flatMap(relationshipType -> getRelationships(context, item, relationshipType)) + .map(relationship -> getRelatedItem(item, relationship)) + .flatMap(orgUnit -> orcidCommonObjectFactory.createOrganization(context, orgUnit).stream()) + .findFirst() + .orElse(null); + + } catch (SQLException e) { + throw new RuntimeException(e); + } + + } + + private Stream getRelationships(Context context, Item item, RelationshipType relationshipType) { + try { + return relationshipService.findByItemAndRelationshipType(context, item, relationshipType).stream(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private Item getRelatedItem(Item item, Relationship relationship) { + return item.equals(relationship.getLeftItem()) ? relationship.getRightItem() : relationship.getLeftItem(); + } + + private FuzzyDate getStartDate(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getStartDateField()) + .flatMap(metadataValue -> orcidCommonObjectFactory.createFuzzyDate(metadataValue)) + .orElse(null); + } + + private FundingTitle getTitle(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getTitleField()) + .map(metadataValue -> getFundingTitle(context, metadataValue)) + .orElse(null); + } + + private FundingTitle getFundingTitle(Context context, MetadataValue metadataValue) { + FundingTitle fundingTitle = new FundingTitle(); + fundingTitle.setTitle(new Title(metadataValue.getValue())); + return fundingTitle; + } + + /** + * Returns an instance of FundingType taking the type from the given item. The + * metadata field to be used to retrieve the item's type is related to the + * configured typeField (orcid.mapping.funding.type). 
+ */ + private FundingType getType(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getTypeField()) + .map(type -> fieldMapping.convertType(type.getValue())) + .flatMap(this::getFundingType) + .orElse(FundingType.CONTRACT); + } + + private Optional getFundingType(String type) { + try { + return Optional.ofNullable(FundingType.fromValue(type)); + } catch (IllegalArgumentException ex) { + LOGGER.warn("The type {} is not valid for ORCID fundings", type); + return Optional.empty(); + } + } + + private Url getUrl(Context context, Item item) { + return orcidCommonObjectFactory.createUrl(context, item).orElse(null); + } + + /** + * Returns an Amount instance taking the amount and currency value from the + * configured metadata values of the given item, if any. + */ + private Amount getAmount(Context context, Item item) { + + Optional amount = getAmountValue(context, item); + Optional currency = getCurrencyValue(context, item); + + if (amount.isEmpty() || currency.isEmpty()) { + return null; + } + + return getAmount(amount.get(), currency.get()); + } + + /** + * Returns the amount value of the configured metadata field + * orcid.mapping.funding.amount + */ + private Optional getAmountValue(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getAmountField()) + .map(MetadataValue::getValue); + } + + /** + * Returns the amount value of the configured metadata field + * orcid.mapping.funding.amount.currency (if configured using the converter + * orcid.mapping.funding.amount.currency.converter). 
+ */ + private Optional getCurrencyValue(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getAmountCurrencyField()) + .map(currency -> fieldMapping.convertAmountCurrency(currency.getValue())) + .filter(currency -> isValidCurrency(currency)); + } + + private boolean isValidCurrency(String currency) { + try { + return currency != null && Currency.getInstance(currency) != null; + } catch (IllegalArgumentException ex) { + return false; + } + } + + private Amount getAmount(String amount, String currency) { + Amount amountObj = new Amount(); + amountObj.setContent(amount); + amountObj.setCurrencyCode(currency); + return amountObj; + } + + private List getMetadataValues(Context context, Item item, Collection metadataFields) { + return metadataFields.stream() + .flatMap(metadataField -> itemService.getMetadataByMetadataString(item, metadataField).stream()) + .collect(Collectors.toList()); + } + + private Optional getMetadataValue(Context context, Item item, String metadataField) { + if (isBlank(metadataField)) { + return Optional.empty(); + } + return itemService.getMetadataByMetadataString(item, metadataField).stream().findFirst() + .filter(metadataValue -> isNotBlank(metadataValue.getValue())); + } + + public OrcidFundingFieldMapping getFieldMapping() { + return fieldMapping; + } + + public void setFieldMapping(OrcidFundingFieldMapping fieldMapping) { + this.fieldMapping = fieldMapping; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidPersonExternalIdentifierFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidPersonExternalIdentifierFactory.java new file mode 100644 index 000000000000..077bb195a6bc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidPersonExternalIdentifierFactory.java @@ -0,0 +1,75 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of 
the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static org.dspace.orcid.model.OrcidProfileSectionType.EXTERNAL_IDS; +import static org.dspace.orcid.model.factory.OrcidFactoryUtils.parseConfigurations; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier; + +/** + * Implementation of {@link OrcidProfileSectionFactory} that model an personal + * external id. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidPersonExternalIdentifierFactory extends OrcidSimpleValueObjectFactory { + + private Map externalIds = new HashMap<>(); + + public OrcidPersonExternalIdentifierFactory(OrcidProfileSectionType sectionType, + OrcidProfileSyncPreference preference) { + super(sectionType, preference); + } + + @Override + public List getSupportedTypes() { + return List.of(EXTERNAL_IDS); + } + + @Override + protected Object create(Context context, MetadataValue metadataValue) { + + String currentMetadataField = metadataValue.getMetadataField().toString('.'); + String externalIdType = externalIds.get(currentMetadataField); + + if (externalIdType == null) { + throw new IllegalArgumentException("Metadata field not supported: " + currentMetadataField); + } + + PersonExternalIdentifier externalId = new PersonExternalIdentifier(); + externalId.setValue(metadataValue.getValue()); + externalId.setType(externalIdType); + externalId.setRelationship(Relationship.SELF); + externalId.setUrl(new Url(metadataValue.getValue())); + + return externalId; 
+ } + + public Map getExternalIds() { + return externalIds; + } + + public void setExternalIds(String externalIds) { + this.externalIds = parseConfigurations(externalIds); + setMetadataFields(this.externalIds.keySet().stream().collect(Collectors.joining(","))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java new file mode 100644 index 000000000000..4ddfbe47a328 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidSimpleValueObjectFactory.java @@ -0,0 +1,149 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static java.util.Arrays.asList; +import static java.util.Collections.emptyList; +import static org.dspace.orcid.model.OrcidProfileSectionType.COUNTRY; +import static org.dspace.orcid.model.OrcidProfileSectionType.KEYWORDS; +import static org.dspace.orcid.model.OrcidProfileSectionType.OTHER_NAMES; +import static org.dspace.orcid.model.OrcidProfileSectionType.RESEARCHER_URLS; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.orcid.jaxb.model.v3.release.common.Country; +import org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.Address; +import org.orcid.jaxb.model.v3.release.record.Keyword; +import org.orcid.jaxb.model.v3.release.record.OtherName; +import 
org.orcid.jaxb.model.v3.release.record.ResearcherUrl; + +/** + * Implementation of {@link OrcidProfileSectionFactory} that creates ORCID + * objects with a single value. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidSimpleValueObjectFactory extends AbstractOrcidProfileSectionFactory { + + private List metadataFields = new ArrayList(); + + public OrcidSimpleValueObjectFactory(OrcidProfileSectionType sectionType, OrcidProfileSyncPreference preference) { + super(sectionType, preference); + } + + @Override + public List getSupportedTypes() { + return List.of(COUNTRY, KEYWORDS, OTHER_NAMES, RESEARCHER_URLS); + } + + @Override + public Object create(Context context, List metadataValues) { + + if (CollectionUtils.isEmpty(metadataValues)) { + throw new IllegalArgumentException("No metadata values provided to create ORCID object with simple value"); + } + + if (metadataValues.size() > 1) { + throw new IllegalArgumentException("Multiple metadata values not supported: " + metadataValues); + } + + MetadataValue metadataValue = metadataValues.get(0); + String currentMetadataField = metadataValue.getMetadataField().toString('.'); + + if (!metadataFields.contains(currentMetadataField)) { + throw new IllegalArgumentException("Metadata field not supported: " + currentMetadataField); + } + + return create(context, metadataValue); + } + + @Override + public List getMetadataSignatures(Context context, Item item) { + return metadataFields.stream() + .flatMap(metadataField -> getMetadataValues(item, metadataField).stream()) + .map(metadataValue -> metadataSignatureGenerator.generate(context, List.of(metadataValue))) + .collect(Collectors.toList()); + } + + @Override + public String getDescription(Context context, Item item, String signature) { + List metadataValues = metadataSignatureGenerator.findBySignature(context, item, signature); + return CollectionUtils.isNotEmpty(metadataValues) ? 
metadataValues.get(0).getValue() : null; + } + + /** + * Create an instance of ORCID profile section based on the configured profile + * section type, taking the value from the given metadataValue. + */ + protected Object create(Context context, MetadataValue metadataValue) { + switch (getProfileSectionType()) { + case COUNTRY: + return createAddress(context, metadataValue); + case KEYWORDS: + return createKeyword(metadataValue); + case OTHER_NAMES: + return createOtherName(metadataValue); + case RESEARCHER_URLS: + return createResearcherUrl(metadataValue); + default: + throw new IllegalStateException("OrcidSimpleValueObjectFactory does not support type " + + getProfileSectionType()); + } + } + + private ResearcherUrl createResearcherUrl(MetadataValue metadataValue) { + ResearcherUrl researcherUrl = new ResearcherUrl(); + researcherUrl.setUrl(new Url(metadataValue.getValue())); + return researcherUrl; + } + + private OtherName createOtherName(MetadataValue metadataValue) { + OtherName otherName = new OtherName(); + otherName.setContent(metadataValue.getValue()); + return otherName; + } + + private Keyword createKeyword(MetadataValue metadataValue) { + Keyword keyword = new Keyword(); + keyword.setContent(metadataValue.getValue()); + return keyword; + } + + private Address createAddress(Context context, MetadataValue metadataValue) { + return orcidCommonObjectFactory.createCountry(context, metadataValue) + .map(this::createAddress) + .orElseThrow(() -> new IllegalArgumentException("No address creatable " + + "from value " + metadataValue.getValue())); + } + + private Address createAddress(Country country) { + Address address = new Address(); + address.setCountry(country); + return address; + } + + public void setMetadataFields(String metadataFields) { + this.metadataFields = metadataFields != null ? 
asList(metadataFields.split(",")) : emptyList(); + } + + @Override + public List getMetadataFields() { + return metadataFields; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidWorkFactory.java b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidWorkFactory.java new file mode 100644 index 000000000000..53b46d8256d1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/factory/impl/OrcidWorkFactory.java @@ -0,0 +1,283 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.factory.impl; + +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.orcid.jaxb.model.common.Relationship.SELF; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.EnumUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidWorkFieldMapping; +import org.dspace.orcid.model.factory.OrcidCommonObjectFactory; +import org.dspace.orcid.model.factory.OrcidEntityFactory; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.LanguageCode; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Subtitle; +import org.orcid.jaxb.model.v3.release.common.Title; +import 
org.orcid.jaxb.model.v3.release.common.Url; +import org.orcid.jaxb.model.v3.release.record.Activity; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkContributors; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidEntityFactory} that creates instances of + * {@link Work}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidWorkFactory implements OrcidEntityFactory { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidWorkFactory.class); + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidCommonObjectFactory orcidCommonObjectFactory; + + private OrcidWorkFieldMapping fieldMapping; + + @Override + public OrcidEntityType getEntityType() { + return OrcidEntityType.PUBLICATION; + } + + @Override + public Activity createOrcidObject(Context context, Item item) { + Work work = new Work(); + work.setJournalTitle(getJournalTitle(context, item)); + work.setWorkContributors(getWorkContributors(context, item)); + work.setWorkTitle(getWorkTitle(context, item)); + work.setPublicationDate(getPublicationDate(context, item)); + work.setWorkExternalIdentifiers(getWorkExternalIds(context, item)); + work.setWorkType(getWorkType(context, item)); + work.setShortDescription(getShortDescription(context, item)); + work.setLanguageCode(getLanguageCode(context, item)); + work.setUrl(getUrl(context, item)); + return work; + } + + private Title getJournalTitle(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getJournalTitleField()) + .map(metadataValue -> new Title(metadataValue.getValue())) + .orElse(null); + } + + private 
WorkContributors getWorkContributors(Context context, Item item) { + Map contributorFields = fieldMapping.getContributorFields(); + List contributors = getMetadataValues(context, item, contributorFields.keySet()).stream() + .map(metadataValue -> getContributor(context, metadataValue)) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); + return new WorkContributors(contributors); + } + + private Optional getContributor(Context context, MetadataValue metadataValue) { + Map contributorFields = fieldMapping.getContributorFields(); + ContributorRole role = contributorFields.get(metadataValue.getMetadataField().toString('.')); + return orcidCommonObjectFactory.createContributor(context, metadataValue, role); + } + + /** + * Create an instance of WorkTitle from the given item. + */ + private WorkTitle getWorkTitle(Context context, Item item) { + Optional workTitleValue = getWorkTitleValue(context, item); + if (workTitleValue.isEmpty()) { + return null; + } + + WorkTitle workTitle = new WorkTitle(); + workTitle.setTitle(new Title(workTitleValue.get())); + getSubTitle(context, item).ifPresent(workTitle::setSubtitle); + return workTitle; + } + + /** + * Take the work title from the configured metadata field of the given item + * (orcid.mapping.work.title), if any. + */ + private Optional getWorkTitleValue(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getTitleField()) + .map(MetadataValue::getValue); + } + + /** + * Take the work title from the configured metadata field of the given item + * (orcid.mapping.work.sub-title), if any. 
+ */ + private Optional getSubTitle(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getSubTitleField()) + .map(MetadataValue::getValue) + .map(Subtitle::new); + } + + private PublicationDate getPublicationDate(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getPublicationDateField()) + .flatMap(orcidCommonObjectFactory::createFuzzyDate) + .map(PublicationDate::new) + .orElse(null); + } + + /** + * Creates an instance of ExternalIDs from the metadata values of the given + * item, using the orcid.mapping.funding.external-ids configuration. + */ + private ExternalIDs getWorkExternalIds(Context context, Item item) { + ExternalIDs externalIdentifiers = new ExternalIDs(); + externalIdentifiers.getExternalIdentifier().addAll(getWorkSelfExternalIds(context, item)); + return externalIdentifiers; + } + + /** + * Creates a list of ExternalID, one for orcid.mapping.funding.external-ids + * value, taking the values from the given item. + */ + private List getWorkSelfExternalIds(Context context, Item item) { + + List selfExternalIds = new ArrayList(); + + Map externalIdentifierFields = fieldMapping.getExternalIdentifierFields(); + + if (externalIdentifierFields.containsKey(SIMPLE_HANDLE_PLACEHOLDER)) { + String handleType = externalIdentifierFields.get(SIMPLE_HANDLE_PLACEHOLDER); + selfExternalIds.add(getExternalId(handleType, item.getHandle(), SELF)); + } + + getMetadataValues(context, item, externalIdentifierFields.keySet()).stream() + .map(this::getSelfExternalId) + .forEach(selfExternalIds::add); + + return selfExternalIds; + } + + /** + * Creates an instance of ExternalID taking the value from the given + * metadataValue. The type of the ExternalID is calculated using the + * orcid.mapping.funding.external-ids configuration. The relationship of the + * ExternalID is SELF. 
+ */ + private ExternalID getSelfExternalId(MetadataValue metadataValue) { + Map externalIdentifierFields = fieldMapping.getExternalIdentifierFields(); + String metadataField = metadataValue.getMetadataField().toString('.'); + return getExternalId(externalIdentifierFields.get(metadataField), metadataValue.getValue(), SELF); + } + + /** + * Creates an instance of ExternalID with the given type, value and + * relationship. + */ + private ExternalID getExternalId(String type, String value, Relationship relationship) { + ExternalID externalID = new ExternalID(); + externalID.setType(type); + externalID.setValue(value); + externalID.setRelationship(relationship); + return externalID; + } + + /** + * Creates an instance of WorkType from the given item, taking the value fom the + * configured metadata field (orcid.mapping.work.type). + */ + private WorkType getWorkType(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getTypeField()) + .map(MetadataValue::getValue) + .map(type -> fieldMapping.convertType(type)) + .flatMap(this::getWorkType) + .orElse(WorkType.UNDEFINED); + } + + /** + * Creates an instance of WorkType from the given workType value, if valid. 
+ */ + private Optional getWorkType(String workType) { + try { + return Optional.ofNullable(WorkType.fromValue(workType)); + } catch (IllegalArgumentException ex) { + LOGGER.warn("The type {} is not valid for ORCID works", workType); + return Optional.empty(); + } + } + + private String getShortDescription(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getShortDescriptionField()) + .map(MetadataValue::getValue) + .orElse(null); + } + + private String getLanguageCode(Context context, Item item) { + return getMetadataValue(context, item, fieldMapping.getLanguageField()) + .map(MetadataValue::getValue) + .map(language -> fieldMapping.convertLanguage(language)) + .filter(language -> isValidLanguage(language)) + .orElse(null); + } + + private boolean isValidLanguage(String language) { + + if (isBlank(language)) { + return false; + } + + boolean isValid = EnumUtils.isValidEnum(LanguageCode.class, language); + if (!isValid) { + LOGGER.warn("The language {} is not a valid language code for ORCID works", language); + } + return isValid; + } + + private Url getUrl(Context context, Item item) { + return orcidCommonObjectFactory.createUrl(context, item).orElse(null); + } + + private List getMetadataValues(Context context, Item item, Collection metadataFields) { + return metadataFields.stream() + .flatMap(metadataField -> itemService.getMetadataByMetadataString(item, metadataField).stream()) + .collect(Collectors.toList()); + } + + private Optional getMetadataValue(Context context, Item item, String metadataField) { + + if (isBlank(metadataField)) { + return Optional.empty(); + } + + return itemService.getMetadataByMetadataString(item, metadataField).stream() + .filter(metadataValue -> isNotBlank(metadataValue.getValue())) + .findFirst(); + } + + public void setFieldMapping(OrcidWorkFieldMapping fieldMapping) { + this.fieldMapping = fieldMapping; + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidationError.java b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidationError.java new file mode 100644 index 000000000000..36f92cf1c5f4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidationError.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator; + +/** + * Enum that model all the errors that could occurs during an ORCID object + * validation. These codes are used by the {@link OrcidValidator} to returns the + * validation error related to a specific ORCID entity. The values of this enum + * are returned from the OrcidHistoryRestRepository and can be used to show an + * error message to the users when they tries to synchronize some data with + * ORCID. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidValidationError { + + AMOUNT_CURRENCY_REQUIRED("amount-currency.required"), + EXTERNAL_ID_REQUIRED("external-id.required"), + TITLE_REQUIRED("title.required"), + TYPE_REQUIRED("type.required"), + FUNDER_REQUIRED("funder.required"), + INVALID_COUNTRY("country.invalid"), + ORGANIZATION_NAME_REQUIRED("organization.name-required"), + PUBLICATION_DATE_INVALID("publication.date-invalid"), + ORGANIZATION_ADDRESS_REQUIRED("organization.address-required"), + ORGANIZATION_CITY_REQUIRED("organization.city-required"), + ORGANIZATION_COUNTRY_REQUIRED("organization.country-required"), + DISAMBIGUATED_ORGANIZATION_REQUIRED("disambiguated-organization.required"), + DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED("disambiguated-organization.value-required"), + DISAMBIGUATION_SOURCE_REQUIRED("disambiguation-source.required"), + DISAMBIGUATION_SOURCE_INVALID("disambiguation-source.invalid"); + + private final String code; + + private OrcidValidationError(String code) { + this.code = code; + } + + public String getCode() { + return code; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidator.java b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidator.java new file mode 100644 index 000000000000..7b30717e2d45 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/validator/OrcidValidator.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator; + +import java.util.List; + +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.Work; + +/** + * Interface for classes that validate the ORCID entity objects. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidValidator { + + /** + * Validate the given orcid object and returns the validation errors, if any. + * + * @param object the ORCID object to validate + * @return the validation errors, if any + */ + List validate(Object object); + + /** + * Validate the given work and returns the validation errors, if any. + * + * @param work the work to validate + * @return the validation errors, if any + */ + List validateWork(Work work); + + /** + * Validate the given funding and returns the validation errors, if any. + * + * @param funding the funding to validate + * @return the validation errors, if any + */ + List validateFunding(Funding funding); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/model/validator/impl/OrcidValidatorImpl.java b/dspace-api/src/main/java/org/dspace/orcid/model/validator/impl/OrcidValidatorImpl.java new file mode 100644 index 000000000000..a599695c0757 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/model/validator/impl/OrcidValidatorImpl.java @@ -0,0 +1,235 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator.impl; + +import static org.apache.commons.collections.CollectionUtils.isEmpty; +import static org.apache.commons.lang3.ArrayUtils.contains; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.orcid.model.validator.OrcidValidationError.AMOUNT_CURRENCY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_INVALID; 
+import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.EXTERNAL_ID_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.FUNDER_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_ADDRESS_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_CITY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_COUNTRY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_NAME_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.PUBLICATION_DATE_INVALID; +import static org.dspace.orcid.model.validator.OrcidValidationError.TITLE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.TYPE_REQUIRED; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.dspace.orcid.model.validator.OrcidValidationError; +import org.dspace.orcid.model.validator.OrcidValidator; +import org.dspace.services.ConfigurationService; +import org.orcid.jaxb.model.v3.release.common.DisambiguatedOrganization; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.OrganizationAddress; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Year; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingTitle; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; + +/** + * Implementation of {@link OrcidValidator}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidValidatorImpl implements OrcidValidator { + + private final ConfigurationService configurationService; + + public OrcidValidatorImpl(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + + @Override + public List validate(Object object) { + + if (object instanceof Work && isWorkValidationEnabled()) { + return validateWork((Work) object); + } + + if (object instanceof Funding && isFundingValidationEnabled()) { + return validateFunding((Funding) object); + } + + return Collections.emptyList(); + } + + /** + * A work is valid if has title, type, a valid publication date and at least one + * external id. + */ + @Override + public List validateWork(Work work) { + List errors = new ArrayList(); + + WorkTitle title = work.getWorkTitle(); + if (title == null || title.getTitle() == null || isBlank(title.getTitle().getContent())) { + errors.add(TITLE_REQUIRED); + } + + if (work.getWorkType() == null) { + errors.add(TYPE_REQUIRED); + } + + ExternalIDs externalIdentifiers = work.getExternalIdentifiers(); + + if (externalIdentifiers == null || isEmpty(externalIdentifiers.getExternalIdentifier())) { + errors.add(EXTERNAL_ID_REQUIRED); + } + + PublicationDate publicationDate = work.getPublicationDate(); + if (publicationDate != null && isYearNotValid(publicationDate)) { + errors.add(PUBLICATION_DATE_INVALID); + } + + return errors; + } + + /** + * A funding is valid if has title, a valid funder organization and at least one + * external id. If it has an amount, the amount currency is required. 
+ */ + @Override + public List validateFunding(Funding funding) { + + List errors = new ArrayList(); + + FundingTitle title = funding.getTitle(); + if (title == null || title.getTitle() == null || isBlank(title.getTitle().getContent())) { + errors.add(TITLE_REQUIRED); + } + + ExternalIDs externalIdentifiers = funding.getExternalIdentifiers(); + + if (externalIdentifiers == null || isEmpty(externalIdentifiers.getExternalIdentifier())) { + errors.add(EXTERNAL_ID_REQUIRED); + } + + if (funding.getOrganization() == null) { + errors.add(FUNDER_REQUIRED); + } else { + errors.addAll(validate(funding.getOrganization())); + } + + if (funding.getAmount() != null && isBlank(funding.getAmount().getCurrencyCode())) { + errors.add(AMOUNT_CURRENCY_REQUIRED); + } + + return errors; + } + + /** + * The organization is valid if it has a name, a valid address and a valid + * disambiguated-organization complex type. + */ + private List validate(Organization organization) { + List errors = new ArrayList(); + if (isBlank(organization.getName())) { + errors.add(ORGANIZATION_NAME_REQUIRED); + } + + errors.addAll(validate(organization.getAddress())); + errors.addAll(validate(organization.getDisambiguatedOrganization())); + + return errors; + } + + /** + * A disambiguated-organization type is valid if it has an identifier and a + * valid source (the valid values for sources are configured with + * orcid.validation.organization.identifier-sources) + */ + private List validate(DisambiguatedOrganization disambiguatedOrganization) { + + List errors = new ArrayList(); + + + if (disambiguatedOrganization == null) { + errors.add(DISAMBIGUATED_ORGANIZATION_REQUIRED); + return errors; + } + + if (isBlank(disambiguatedOrganization.getDisambiguatedOrganizationIdentifier())) { + errors.add(DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED); + } + + String disambiguationSource = disambiguatedOrganization.getDisambiguationSource(); + + if (isBlank(disambiguationSource)) { + 
errors.add(DISAMBIGUATION_SOURCE_REQUIRED); + } else if (isInvalidDisambiguationSource(disambiguationSource)) { + errors.add(DISAMBIGUATION_SOURCE_INVALID); + } + + return errors; + } + + /** + * An organization address is valid if it has a city and a country. + */ + private List validate(OrganizationAddress address) { + List errors = new ArrayList(); + + if (address == null) { + errors.add(ORGANIZATION_ADDRESS_REQUIRED); + return errors; + } + + if (isBlank(address.getCity())) { + errors.add(ORGANIZATION_CITY_REQUIRED); + } + + if (address.getCountry() == null) { + errors.add(ORGANIZATION_COUNTRY_REQUIRED); + } + + return errors; + } + + private boolean isYearNotValid(PublicationDate publicationDate) { + Year year = publicationDate.getYear(); + if (year == null) { + return true; + } + + try { + return Integer.valueOf(year.getValue()) < 1900; + } catch (NumberFormatException ex) { + return true; + } + } + + private boolean isInvalidDisambiguationSource(String disambiguationSource) { + return !contains(getDisambiguedOrganizationSources(), disambiguationSource); + } + + private String[] getDisambiguedOrganizationSources() { + return configurationService.getArrayProperty("orcid.validation.organization.identifier-sources"); + } + + private boolean isWorkValidationEnabled() { + return configurationService.getBooleanProperty("orcid.validation.work.enabled", true); + } + + private boolean isFundingValidationEnabled() { + return configurationService.getBooleanProperty("orcid.validation.funding.enabled", true); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPush.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPush.java new file mode 100644 index 000000000000..0e6f856bfcee --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPush.java @@ -0,0 +1,331 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * 
tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.script; + +import static org.apache.commons.lang3.StringUtils.isNotEmpty; +import static org.dspace.profile.OrcidSynchronizationMode.BATCH; +import static org.dspace.profile.OrcidSynchronizationMode.MANUAL; + +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.exception.OrcidValidationException; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.profile.OrcidSynchronizationMode; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Script that performs the bulk synchronization with ORCID registry of all the + * ORCID queue records that have a profileItem that configures the + * synchronization mode equal to BATCH. 
+ * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidBulkPush extends DSpaceRunnable> { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidBulkPush.class); + + private OrcidQueueService orcidQueueService; + + private OrcidHistoryService orcidHistoryService; + + private OrcidSynchronizationService orcidSynchronizationService; + + private ConfigurationService configurationService; + + private Context context; + + /** + * Cache that stores the synchronization mode set for a specific profile item. + */ + private Map synchronizationModeByProfileItem = new HashMap<>(); + + private boolean ignoreMaxAttempts = false; + + @Override + public void setup() throws ParseException { + OrcidServiceFactory orcidServiceFactory = OrcidServiceFactory.getInstance(); + this.orcidQueueService = orcidServiceFactory.getOrcidQueueService(); + this.orcidHistoryService = orcidServiceFactory.getOrcidHistoryService(); + this.orcidSynchronizationService = orcidServiceFactory.getOrcidSynchronizationService(); + this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + if (commandLine.hasOption('f')) { + ignoreMaxAttempts = true; + } + + } + + @Override + public void internalRun() throws Exception { + + if (isOrcidSynchronizationDisabled()) { + handler.logWarning("The ORCID synchronization is disabled. The script cannot proceed"); + return; + } + + context = new Context(); + assignCurrentUserInContext(); + + try { + context.turnOffAuthorisationSystem(); + performBulkSynchronization(); + context.complete(); + } catch (Exception e) { + handler.handleException(e); + context.abort(); + } finally { + context.restoreAuthSystemState(); + } + } + + /** + * Find all the Orcid Queue records that need to be synchronized and perform the + * synchronization. 
 + */ + private void performBulkSynchronization() throws SQLException { + + List queueRecords = findQueueRecordsToSynchronize(); + handler.logInfo("Found " + queueRecords.size() + " queue records to synchronize with ORCID"); + + for (OrcidQueue queueRecord : queueRecords) { + performSynchronization(queueRecord); + } + + } + + /** + * Returns all the stored Orcid Queue records (ignoring or not the max attempts) + * related to a profile that has the synchronization mode set to BATCH. + */ + private List findQueueRecordsToSynchronize() throws SQLException { + return findQueueRecords().stream() + .filter(record -> getProfileItemSynchronizationMode(record.getProfileItem()) == BATCH) + .collect(Collectors.toList()); + } + + /** + * If the current script execution is configured to ignore the max attempts, + * returns all the ORCID Queue records, otherwise returns the ORCID Queue + * records that have an attempts value less than the configured max attempts + * value. + */ + private List findQueueRecords() throws SQLException { + if (ignoreMaxAttempts) { + return orcidQueueService.findAll(context); + } else { + int attempts = configurationService.getIntProperty("orcid.bulk-synchronization.max-attempts"); + return orcidQueueService.findByAttemptsLessThan(context, attempts); + } + } + + /** + * Try to synchronize the given queue record with ORCID, handling any errors. 
+ */ + private void performSynchronization(OrcidQueue queueRecord) { + + try { + + queueRecord = reload(queueRecord); + + handler.logInfo(getOperationInfoMessage(queueRecord)); + + OrcidHistory orcidHistory = orcidHistoryService.synchronizeWithOrcid(context, queueRecord, false); + + handler.logInfo(getSynchronizationResultMessage(orcidHistory)); + + commitTransaction(); + + } catch (OrcidValidationException ex) { + rollbackTransaction(); + handler.logError(getValidationErrorMessage(ex)); + } catch (Exception ex) { + rollbackTransaction(); + String errorMessage = getUnexpectedErrorMessage(ex); + LOGGER.error(errorMessage, ex); + handler.logError(errorMessage); + } finally { + incrementAttempts(queueRecord); + } + + } + + /** + * Returns the Synchronization mode related to the given profile item. + */ + private OrcidSynchronizationMode getProfileItemSynchronizationMode(Item profileItem) { + OrcidSynchronizationMode synchronizationMode = synchronizationModeByProfileItem.get(profileItem); + if (synchronizationMode == null) { + synchronizationMode = orcidSynchronizationService.getSynchronizationMode(profileItem).orElse(MANUAL); + synchronizationModeByProfileItem.put(profileItem, synchronizationMode); + } + return synchronizationMode; + } + + /** + * Returns an info log message with the details of the given record's operation. + * This message is logged before ORCID synchronization. 
+ */ + private String getOperationInfoMessage(OrcidQueue record) { + + UUID profileItemId = record.getProfileItem().getID(); + String putCode = record.getPutCode(); + String type = record.getRecordType(); + + if (record.getOperation() == null) { + return "Synchronization of " + type + " data for profile with ID: " + profileItemId; + } + + switch (record.getOperation()) { + case INSERT: + return "Addition of " + type + " for profile with ID: " + profileItemId; + case UPDATE: + return "Update of " + type + " for profile with ID: " + profileItemId + " by put code " + putCode; + case DELETE: + return "Deletion of " + type + " for profile with ID: " + profileItemId + " by put code " + putCode; + default: + return "Synchronization of " + type + " data for profile with ID: " + profileItemId; + } + + } + + /** + * Returns an info log message with the details of the synchronization result. + * This message is logged after ORCID synchronization. + */ + private String getSynchronizationResultMessage(OrcidHistory orcidHistory) { + + String message = "History record created with status " + orcidHistory.getStatus(); + + switch (orcidHistory.getStatus()) { + case 201: + case 200: + case 204: + message += ". The operation was completed successfully"; + break; + case 400: + message += ". The resource sent to ORCID registry is not valid"; + break; + case 404: + message += ". The resource does not exists anymore on the ORCID registry"; + break; + case 409: + message += ". The resource is already present on the ORCID registry"; + break; + case 500: + message += ". An internal server error on ORCID registry side occurs"; + break; + default: + message += ". 
Details: " + orcidHistory.getResponseMessage(); + break; + } + + return message; + + } + + private String getValidationErrorMessage(OrcidValidationException ex) { + return ex.getMessage(); + } + + private String getUnexpectedErrorMessage(Exception ex) { + return "An unexpected error occurs during the synchronization: " + getRootMessage(ex); + } + + private void incrementAttempts(OrcidQueue queueRecord) { + queueRecord = reload(queueRecord); + if (queueRecord == null) { + return; + } + + try { + queueRecord.setAttempts(queueRecord.getAttempts() != null ? queueRecord.getAttempts() + 1 : 1); + orcidQueueService.update(context, queueRecord); + commitTransaction(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + + } + + /** + * This method will assign the currentUser to the {@link Context}. The instance + * of the method in this class will fetch the EPersonIdentifier from this class, + * this identifier was given to this class upon instantiation, it'll then be + * used to find the {@link EPerson} associated with it and this {@link EPerson} + * will be set as the currentUser of the created {@link Context} + */ + private void assignCurrentUserInContext() throws SQLException { + UUID uuid = getEpersonIdentifier(); + if (uuid != null) { + EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); + context.setCurrentUser(ePerson); + } + } + + private OrcidQueue reload(OrcidQueue queueRecord) { + try { + return context.reloadEntity(queueRecord); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private void commitTransaction() { + try { + context.commit(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private void rollbackTransaction() { + try { + context.rollback(); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private String getRootMessage(Exception ex) { + String message = ExceptionUtils.getRootCauseMessage(ex); + return 
isNotEmpty(message) ? message.substring(message.indexOf(":") + 1).trim() : "Generic error"; + } + + private boolean isOrcidSynchronizationDisabled() { + return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); + } + + @Override + @SuppressWarnings("unchecked") + public OrcidBulkPushScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("orcid-bulk-push", + OrcidBulkPushScriptConfiguration.class); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java new file mode 100644 index 000000000000..88a1033eca5f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.script; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * Script configuration for {@link OrcidBulkPush}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + * @param the OrcidBulkPush type + */ +public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "force", false, "force the synchronization ignoring maximum attempts"); + options.getOption("f").setType(boolean.class); + options.getOption("f").setRequired(false); + + super.options = options; + } + return options; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/MetadataSignatureGenerator.java b/dspace-api/src/main/java/org/dspace/orcid/service/MetadataSignatureGenerator.java new file mode 100644 index 000000000000..28a270faa760 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/MetadataSignatureGenerator.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; + +/** + * Interface that mark classes that can be used to generate a signature for + * metadata values. The signature must be a unique identification of a metadata, + * based on the attributes that compose it (such as field, value and authority). + * It is possible to generate a signature for a single metadata value and also + * for a list of values. 
Given an item, a signature can for example be used to + * check if the associated metadata is present in the item. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface MetadataSignatureGenerator { + + /** + * Generate a signature related to the given metadata values. + * + * @param context the DSpace context + * @param metadataValues the metadata values to sign + * @return the generated signature + */ + public String generate(Context context, List metadataValues); + + /** + * Returns the metadata values traceable by the given item related with the + * given signature. + * + * @param context the DSpace context + * @param item the item + * @param signature the metadata signature + * @return the found metadata + */ + public List findBySignature(Context context, Item item, String signature); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidEntityFactoryService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidEntityFactoryService.java new file mode 100644 index 000000000000..78f2c1331d57 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidEntityFactoryService.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.orcid.jaxb.model.v3.release.record.Activity; + +/** + * Interface that mark classes that handle the configured instance of + * {@link OrcidEntityFactory}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidEntityFactoryService { + + /** + * Builds an ORCID Activity object starting from the given item. The actual type + * of Activity constructed depends on the entity type of the input item. 
+ * + * @param context the DSpace context + * @param item the item + * @return the created object + */ + Activity createOrcidObject(Context context, Item item); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidHistoryService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidHistoryService.java new file mode 100644 index 000000000000..13e1a52b6f13 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidHistoryService.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.exception.OrcidValidationException; + +/** + * Interface of service to manage OrcidHistory. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public interface OrcidHistoryService { + + /** + * Get an OrcidHistory from the database. + * + * @param context DSpace context object + * @param id ID of the OrcidHistory + * @return the OrcidHistory format, or null if the ID is invalid. + * @throws SQLException if database error + */ + public OrcidHistory find(Context context, int id) throws SQLException; + + /** + * Find all the ORCID history records. 
+ * + * @param context DSpace context object + * @return the ORCID history records + * @throws SQLException if an SQL error occurs + */ + public List findAll(Context context) throws SQLException; + + /** + * Get the OrcidHistory records where the given item is the profile item OR the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidHistory entities + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Find the OrcidHistory records related to the given entity item. + * + * @param context DSpace context object + * @param entity the entity item + * @return the found put codes + * @throws SQLException if database error + */ + public List findByEntity(Context context, Item entity) throws SQLException; + + /** + * Create a new OrcidHistory records related to the given profileItem and entity + * items. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the created orcid history record + * @throws SQLException if database error + */ + public OrcidHistory create(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Delete an OrcidHistory + * + * @param context context + * @param orcidHistory the OrcidHistory entity to delete + * @throws SQLException if database error + */ + public void delete(Context context, OrcidHistory orcidHistory) throws SQLException; + + /** + * Update the OrcidHistory + * + * @param context context + * @param orcidHistory the OrcidHistory entity to update + * @throws SQLException if database error + */ + public void update(Context context, OrcidHistory orcidHistory) throws SQLException; + + /** + * Find the last put code related to the given profileItem and entity item. 
+ * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the found put code, if any + * @throws SQLException if database error + */ + public Optional findLastPutCode(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Find all the last put code related to the entity item each associated with + * the profileItem to which it refers. + * + * @param context DSpace context object + * @param entity the entity item + * @return a map that relates the profileItems with the identified + * putCode + * @throws SQLException if database error + */ + public Map findLastPutCodes(Context context, Item entity) throws SQLException; + + /** + * Find all the successfully Orcid history records with the given record type + * related to the given entity. An history record is considered successful if + * the status is between 200 and 300. + * + * @param context DSpace context object + * @param entity the entity item + * @param recordType the record type + * @return the found orcid history records + * @throws SQLException if database error + */ + List findSuccessfullyRecordsByEntityAndType(Context context, Item entity, String recordType) + throws SQLException; + + /** + * Synchronize the entity related to the given orcidQueue record with ORCID. 
+ * + * @param context DSpace context object + * @param orcidQueue the orcid queue record that has the + * references of the data to be synchronized + * @param forceAddition to force the insert on the ORCID registry + * @return the created orcid history record with the + * synchronization result + * @throws SQLException if database error + * @throws OrcidValidationException if the data to synchronize with ORCID is not + * valid + */ + public OrcidHistory synchronizeWithOrcid(Context context, OrcidQueue orcidQueue, boolean forceAddition) + throws SQLException, OrcidValidationException; + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidProfileSectionFactoryService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidProfileSectionFactoryService.java new file mode 100644 index 000000000000..603d33ddf5c2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidProfileSectionFactoryService.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.util.List; +import java.util.Optional; + +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.profile.OrcidProfileSyncPreference; + +/** + * Interface that mark classes that handle the configured instance of + * {@link OrcidProfileSectionFactory}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidProfileSectionFactoryService { + + /** + * Returns the profile section factory of the given type. 
+ * + * @param type the type of the section configurations to retrieve + * @return the section configurations of the given type + */ + Optional findBySectionType(OrcidProfileSectionType type); + + /** + * Returns all the profile section configurations relative to the given + * preferences. + * + * @param preferences the preferences to search for + * @return the section configurations + */ + List findByPreferences(List preferences); + + /** + * Builds an ORCID object starting from the given metadata values compliance to + * the given profile section type. + * + * @param context the DSpace context + * @param metadataValues the metadata values + * @param type the profile section type + * @return the created object + */ + Object createOrcidObject(Context context, List metadataValues, OrcidProfileSectionType type); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java new file mode 100644 index 000000000000..8de25e9caf1e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidQueueService.java @@ -0,0 +1,260 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.profile.OrcidEntitySyncPreference; + +/** + * Service that handles ORCID queue records. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidQueueService { + + /** + * Create an OrcidQueue record with the given profileItem and entity. 
The type + * of operation is calculated based on whether or not the given entity was + * already pushed to the ORCID registry. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + public OrcidQueue create(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Create an OrcidQueue record with the given profileItem and entity to push new + * data to ORCID. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + public OrcidQueue createEntityInsertionRecord(Context context, Item profileItem, Item entity) throws SQLException; + + /** + * Create an OrcidQueue record with the given profileItem to update a record on + * ORCID with the given putCode. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @param putCode the putCode related to the given entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + public OrcidQueue createEntityUpdateRecord(Context context, Item profileItem, Item entity, String putCode) + throws SQLException; + + /** + * Create an OrcidQueue record with the given profileItem to delete a record on + * ORCID related to the given entity type with the given putCode. 
+ * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param description the orcid queue record description + * @param type the type of the entity item + * @param putCode the putCode related to the given entity item + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + OrcidQueue createEntityDeletionRecord(Context context, Item profileItem, String description, String type, + String putCode) + throws SQLException; + + /** + * Create an OrcidQueue record with the profile to add data to ORCID. + * + * @param context DSpace context object + * @param profile the profile item + * @param description the record description + * @param recordType the record type + * @param metadata the metadata signature + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + OrcidQueue createProfileInsertionRecord(Context context, Item profile, String description, String recordType, + String metadata) throws SQLException; + + /** + * Create an OrcidQueue record with the profile to remove data from ORCID. + * + * @param context DSpace context object + * @param profile the profile item + * @param description the record description + * @param recordType the record type + * @param putCode the putCode + * @return the stored record + * @throws SQLException if an SQL error occurs + */ + OrcidQueue createProfileDeletionRecord(Context context, Item profile, String description, String recordType, + String metadata, String putCode) throws SQLException; + + /** + * Find all the ORCID queue records. + * + * @param context DSpace context object + * @return the ORCID queue records + * @throws SQLException if an SQL error occurs + */ + public List findAll(Context context) throws SQLException; + + /** + * Get the orcid queue records by the profileItem id. 
+ * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @return the orcid queue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemId(Context context, UUID profileItemId) throws SQLException; + + /** + * Get the orcid queue records by the profileItem id. + * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @param limit limit + * @param offset offset + * @return the orcid queue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException; + + /** + * Get the orcid queue records by the profileItem and entity. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param entity the entity item + * @return the found OrcidQueue records + * @throws SQLException if an SQL error occurs + */ + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException; + + /** + * Get the OrcidQueue records where the given item is the profileItem OR the + * entity + * + * @param context DSpace context object + * @param item the item to search for + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException; + + /** + * Get all the OrcidQueue records with attempts less than the given attempts. + * + * @param context DSpace context object + * @param attempts the maximum value of attempts + * @return the found OrcidQueue records + * @throws SQLException if database error + */ + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException; + + /** + * Returns the number of records on the OrcidQueue associated with the given + * profileItemId. 
+ * + * @param context DSpace context object + * @param profileItemId the profileItem item id + * @return the record's count + * @throws SQLException if an SQL error occurs + */ + long countByProfileItemId(Context context, UUID profileItemId) throws SQLException; + + /** + * Delete the OrcidQueue record with the given id. + * + * @param context DSpace context object + * @param id the id of the record to be deleted + * @throws SQLException if an SQL error occurs + */ + public void deleteById(Context context, Integer id) throws SQLException; + + /** + * Delete an OrcidQueue + * + * @param context DSpace context object + * @param orcidQueue the orcidQueue record to delete + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public void delete(Context context, OrcidQueue orcidQueue) throws SQLException; + + /** + * Delete all the OrcidQueue records with the given entity and record type. + * + * @param context DSpace context object + * @param entity the entity item + * @param recordType the record type + * @throws SQLException if database error occurs + */ + public void deleteByEntityAndRecordType(Context context, Item entity, String recordType) throws SQLException; + + /** + * Delete all the OrcidQueue records with the given profileItem and record type. + * + * @param context DSpace context object + * @param profileItem the profileItem item + * @param recordType the record type + * @throws SQLException if database error occurs + */ + public void deleteByProfileItemAndRecordType(Context context, Item profileItem, String recordType) + throws SQLException; + + /** + * Get an OrcidQueue from the database. + * + * @param context DSpace context object + * @param id ID of the OrcidQueue + * @return the OrcidQueue format, or null if the ID is invalid. 
+ * @throws SQLException if database error + */ + public OrcidQueue find(Context context, int id) throws SQLException; + + /** + * Update the OrcidQueue + * + * @param context context + * @param orcidQueue the OrcidQueue to update + * @throws SQLException if database error + */ + public void update(Context context, OrcidQueue orcidQueue) throws SQLException; + + /** + * Recalculates the ORCID queue records linked to the given profileItem as + * regards the entities of the given type. The recalculation is done based on + * the preference indicated. + * + * @param context context + * @param profileItem the profileItem + * @param entityType the entity type related to the records to recalculate + * @param preference the preference value on which to base the recalculation + * @throws SQLException if database error + */ + public void recalculateOrcidQueue(Context context, Item profileItem, OrcidEntityType entityType, + OrcidEntitySyncPreference preference) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidSynchronizationService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidSynchronizationService.java new file mode 100644 index 000000000000..575ce6811b24 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidSynchronizationService.java @@ -0,0 +1,167 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.Optional; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.dspace.profile.OrcidProfileDisconnectionMode; +import 
 org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.profile.OrcidSynchronizationMode; + +/** + * Service that handles the synchronization between a DSpace profile and the + * related ORCID profile, if any. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public interface OrcidSynchronizationService { + + /** + * Check if the given item is linked to an ORCID profile. + * + * @param context the relevant DSpace Context. + * @param item the item to check + * @return true if the given item is linked to ORCID + */ + boolean isLinkedToOrcid(Context context, Item item); + + /** + * Configure the given profile with the data present in the given ORCID token. + * This action is required to synchronize profile and related entities with + * ORCID. No security check is done, it is therefore the caller's responsibility + * to verify for example that the current user has permission to connect the + * profile to ORCID (if necessary). + * + * @param context the relevant DSpace Context. + * @param profile the profile to configure + * @param token the ORCID token + * @throws SQLException if a SQL error occurs during the profile update + */ + public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException; + + /** + * Disconnect the given profile from ORCID. + * + * @param context the relevant DSpace Context. + * @param profile the profile to disconnect + * @throws SQLException if a SQL error occurs during the profile update + */ + public void unlinkProfile(Context context, Item profile) throws SQLException; + + /** + * Set the synchronization preference for the given profile related to the given + * ORCID entity type. + * + * @param context the relevant DSpace Context. 
 + * @param profile the researcher profile to update + * @param entityType the orcid entity type + * @param value the new synchronization preference value + * @return true if the value has actually been updated, + * false if the value to be set is the same as + * the one already configured + * @throws SQLException if a SQL error occurs during the profile + * update + * @throws IllegalArgumentException if the given researcher profile is not linked + * with an ORCID account + */ + public boolean setEntityPreference(Context context, Item profile, OrcidEntityType entityType, + OrcidEntitySyncPreference value) throws SQLException; + + /** + * Update the profile's synchronization preference for the given profile. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param values the new synchronization preference values + * @return true if the value has actually been updated, + * false if the value to be set is the same as + * the one already configured + * @throws SQLException if a SQL error occurs during the profile + * update + * @throws IllegalArgumentException if the given researcher profile is not linked + * with an ORCID account + */ + public boolean setProfilePreference(Context context, Item profile, + List values) throws SQLException; + + /** + * Set the ORCID synchronization mode for the given profile. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param value the new synchronization mode value + * @return true if the value has actually been updated, false if + * the value to be set is the same as the one already + * configured + * @throws SQLException if a SQL error occurs during the profile update + */ + public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value) + throws SQLException; + + /** + * Check if the given researcher profile item is configured to synchronize the + * given item with ORCID. 
+ * + * @param profile the researcher profile item + * @param item the entity type to check + * @return true if the given entity type can be synchronize with ORCID, + * false otherwise + */ + public boolean isSynchronizationAllowed(Item profile, Item item); + + /** + * Returns the ORCID synchronization mode configured for the given profile item. + * + * @param profile the researcher profile item + * @return the synchronization mode + */ + Optional getSynchronizationMode(Item profile); + + /** + * Returns the ORCID synchronization preference related to the given entity type + * configured for the given profile item. + * + * @param profile the researcher profile item + * @param entityType the orcid entity type + * @return the configured preference + */ + Optional getEntityPreference(Item profile, OrcidEntityType entityType); + + /** + * Returns the ORCID synchronization preferences related to the profile itself + * configured for the given profile item. + * + * @param profile the researcher profile item + * @return the synchronization mode + */ + List getProfilePreferences(Item profile); + + /** + * Returns the configuration ORCID profile's disconnection mode. If that mode is + * not configured or the configuration is wrong, the value DISABLED is returned. + * + * @return the disconnection mode + */ + OrcidProfileDisconnectionMode getDisconnectionMode(); + + /** + * Returns all the profiles with the given orcid id. + * + * @param context the relevant DSpace Context. 
 + * @param orcid the orcid id to search for + * @return the found profile items + */ + List findProfilesByOrcid(Context context, String orcid); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/OrcidTokenService.java b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidTokenService.java new file mode 100644 index 000000000000..ead968297108 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/OrcidTokenService.java @@ -0,0 +1,92 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; + +/** + * Service that handles {@link OrcidToken} entities. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OrcidTokenService { + + /** + * Creates a new OrcidToken entity for the given ePerson and accessToken. + * + * @param context the DSpace context + * @param ePerson the EPerson + * @param accessToken the access token + * @return the created entity instance + */ + public OrcidToken create(Context context, EPerson ePerson, String accessToken); + + /** + * Creates a new OrcidToken entity for the given ePerson, profile item and accessToken. + * + * @param context the DSpace context + * @param ePerson the EPerson + * @param profileItem the profile item + * @param accessToken the access token + * @return the created entity instance + */ + public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken); + + /** + * Find an OrcidToken by ePerson. 
+ * + * @param context the DSpace context + * @param ePerson the ePerson to search for + * @return the Orcid token, if any + */ + public OrcidToken findByEPerson(Context context, EPerson ePerson); + + /** + * Find an OrcidToken by profileItem. + * + * @param context the DSpace context + * @param profileItem the profile item to search for + * @return the Orcid token, if any + */ + public OrcidToken findByProfileItem(Context context, Item profileItem); + + /** + * Delete the given ORCID token entity. + * + * @param context the DSpace context + * @param orcidToken the ORCID token entity to delete + */ + public void delete(Context context, OrcidToken orcidToken); + + /** + * Delete all the ORCID token entities. + * + * @param context the DSpace context + */ + public void deleteAll(Context context); + + /** + * Deletes the ORCID token entity related to the given EPerson. + * + * @param context the DSpace context + * @param ePerson the ePerson for the deletion + */ + public void deleteByEPerson(Context context, EPerson ePerson); + + /** + * Deletes the ORCID token entity related to the given profile item. 
+ * + * @param context the DSpace context + * @param profileItem the item for the deletion + */ + public void deleteByProfileItem(Context context, Item profileItem); +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidEntityFactoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidEntityFactoryServiceImpl.java new file mode 100644 index 000000000000..c02185b4301a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidEntityFactoryServiceImpl.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.util.stream.Collectors.toMap; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; + +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.factory.OrcidEntityFactory; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.orcid.jaxb.model.v3.release.record.Activity; + +/** + * Implementation of {@link OrcidEntityFactoryService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidEntityFactoryServiceImpl implements OrcidEntityFactoryService { + + /** + * Message of the exception thrown if the given item is not a valid entity for + * ORCID (defined with the entityFactories map). 
+ */ + private final String INVALID_ENTITY_MSG = "The item with id %s is not a configured Orcid entity"; + + private final Map entityFactories; + + private final ItemService itemService; + + private OrcidEntityFactoryServiceImpl(List entityFactories, ItemService itemService) { + this.itemService = itemService; + this.entityFactories = entityFactories.stream() + .collect(toMap(OrcidEntityFactory::getEntityType, Function.identity())); + } + + @Override + public Activity createOrcidObject(Context context, Item item) { + OrcidEntityFactory factory = getOrcidEntityType(item) + .map(entityType -> entityFactories.get(entityType)) + .orElseThrow(() -> new IllegalArgumentException(String.format(INVALID_ENTITY_MSG, item.getID()))); + + return factory.createOrcidObject(context, item); + } + + private Optional getOrcidEntityType(Item item) { + return Optional.ofNullable(OrcidEntityType.fromEntityType(itemService.getEntityTypeLabel(item))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidHistoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidHistoryServiceImpl.java new file mode 100644 index 000000000000..0bec9a12e0ea --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidHistoryServiceImpl.java @@ -0,0 +1,360 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.lang.String.format; +import static java.util.Comparator.comparing; +import static java.util.Comparator.naturalOrder; +import static java.util.Comparator.nullsFirst; +import static java.util.Optional.ofNullable; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.math.NumberUtils.isCreatable; + +import java.sql.SQLException; +import 
java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpStatus; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidResponse; +import org.dspace.orcid.dao.OrcidHistoryDAO; +import org.dspace.orcid.dao.OrcidQueueDAO; +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.exception.OrcidValidationException; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.validator.OrcidValidationError; +import org.dspace.orcid.model.validator.OrcidValidator; +import org.dspace.orcid.service.MetadataSignatureGenerator; +import org.dspace.orcid.service.OrcidEntityFactoryService; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.orcid.service.OrcidTokenService; +import org.orcid.jaxb.model.v3.release.record.Activity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidHistoryService}. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidHistoryServiceImpl implements OrcidHistoryService { + + private static final Logger LOGGER = LoggerFactory.getLogger(OrcidHistoryServiceImpl.class); + + @Autowired + private OrcidHistoryDAO orcidHistoryDAO; + + @Autowired + private OrcidQueueDAO orcidQueueDAO; + + @Autowired + private ItemService itemService; + + @Autowired + private OrcidProfileSectionFactoryService profileFactoryService; + + @Autowired + private OrcidEntityFactoryService activityFactoryService; + + @Autowired + private MetadataSignatureGenerator metadataSignatureGenerator; + + @Autowired + private OrcidClient orcidClient; + + @Autowired + private OrcidValidator orcidValidator; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public OrcidHistory find(Context context, int id) throws SQLException { + return orcidHistoryDAO.findByID(context, OrcidHistory.class, id); + } + + @Override + public List findAll(Context context) throws SQLException { + return orcidHistoryDAO.findAll(context, OrcidHistory.class); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item profileItem) throws SQLException { + return orcidHistoryDAO.findByProfileItemOrEntity(context, profileItem); + } + + @Override + public OrcidHistory create(Context context, Item profileItem, Item entity) throws SQLException { + OrcidHistory orcidHistory = new OrcidHistory(); + orcidHistory.setEntity(entity); + orcidHistory.setProfileItem(profileItem); + return orcidHistoryDAO.create(context, orcidHistory); + } + + @Override + public void delete(Context context, OrcidHistory orcidHistory) throws SQLException { + orcidHistoryDAO.delete(context, orcidHistory); + } + + @Override + public void update(Context context, OrcidHistory orcidHistory) throws SQLException { + if (orcidHistory != null) { + orcidHistoryDAO.save(context, orcidHistory); + 
} + } + + @Override + public Optional findLastPutCode(Context context, Item profileItem, Item entity) throws SQLException { + List records = orcidHistoryDAO.findByProfileItemAndEntity(context, profileItem.getID(), + entity.getID()); + return findLastPutCode(records, profileItem); + } + + @Override + public Map findLastPutCodes(Context context, Item entity) throws SQLException { + Map profileItemAndPutCodeMap = new HashMap(); + + List orcidHistoryRecords = findByEntity(context, entity); + for (OrcidHistory orcidHistoryRecord : orcidHistoryRecords) { + Item profileItem = orcidHistoryRecord.getProfileItem(); + if (profileItemAndPutCodeMap.containsKey(profileItem)) { + continue; + } + + findLastPutCode(orcidHistoryRecords, profileItem) + .ifPresent(putCode -> profileItemAndPutCodeMap.put(profileItem, putCode)); + } + + return profileItemAndPutCodeMap; + } + + @Override + public List findByEntity(Context context, Item entity) throws SQLException { + return orcidHistoryDAO.findByEntity(context, entity); + } + + @Override + public List findSuccessfullyRecordsByEntityAndType(Context context, + Item entity, String recordType) throws SQLException { + return orcidHistoryDAO.findSuccessfullyRecordsByEntityAndType(context, entity, recordType); + } + + @Override + public OrcidHistory synchronizeWithOrcid(Context context, OrcidQueue orcidQueue, boolean forceAddition) + throws SQLException { + + Item profileItem = orcidQueue.getProfileItem(); + + String orcid = getMetadataValue(profileItem, "person.identifier.orcid") + .orElseThrow(() -> new IllegalArgumentException( + format("The related profileItem item (id = %s) does not have an orcid", profileItem.getID()))); + + String token = getAccessToken(context, profileItem) + .orElseThrow(() -> new IllegalArgumentException( + format("The related profileItem item (id = %s) does not have an access token", profileItem.getID()))); + + OrcidOperation operation = calculateOperation(orcidQueue, forceAddition); + + try { + + OrcidResponse 
response = synchronizeWithOrcid(context, orcidQueue, orcid, token, operation); + OrcidHistory orcidHistory = createHistoryRecordFromOrcidResponse(context, orcidQueue, operation, response); + orcidQueueDAO.delete(context, orcidQueue); + return orcidHistory; + + } catch (OrcidValidationException ex) { + throw ex; + } catch (OrcidClientException ex) { + LOGGER.error("An error occurs during the orcid synchronization of ORCID queue " + orcidQueue, ex); + return createHistoryRecordFromOrcidError(context, orcidQueue, operation, ex); + } catch (RuntimeException ex) { + LOGGER.warn("An unexpected error occurs during the orcid synchronization of ORCID queue " + orcidQueue, ex); + return createHistoryRecordFromGenericError(context, orcidQueue, operation, ex); + } + + } + + private OrcidResponse synchronizeWithOrcid(Context context, OrcidQueue orcidQueue, String orcid, String token, + OrcidOperation operation) throws SQLException { + if (isProfileSectionType(orcidQueue)) { + return synchronizeProfileDataWithOrcid(context, orcidQueue, orcid, token, operation); + } else if (isEntityType(orcidQueue)) { + return synchronizeEntityWithOrcid(context, orcidQueue, orcid, token, operation); + } else { + throw new IllegalArgumentException("The type of the given queue record could not be determined"); + } + } + + private OrcidOperation calculateOperation(OrcidQueue orcidQueue, boolean forceAddition) { + OrcidOperation operation = orcidQueue.getOperation(); + if (operation == null) { + throw new IllegalArgumentException("The orcid queue record with id " + orcidQueue.getID() + + " has no operation defined"); + } + return operation != OrcidOperation.DELETE && forceAddition ? 
OrcidOperation.INSERT : operation; + } + + private OrcidResponse synchronizeEntityWithOrcid(Context context, OrcidQueue orcidQueue, + String orcid, String token, OrcidOperation operation) throws SQLException { + if (operation == OrcidOperation.DELETE) { + return deleteEntityOnOrcid(context, orcid, token, orcidQueue); + } else { + return sendEntityToOrcid(context, orcid, token, orcidQueue, operation == OrcidOperation.UPDATE); + } + } + + private OrcidResponse synchronizeProfileDataWithOrcid(Context context, OrcidQueue orcidQueue, + String orcid, String token, OrcidOperation operation) throws SQLException { + + if (operation == OrcidOperation.INSERT) { + return sendProfileDataToOrcid(context, orcid, token, orcidQueue); + } else { + return deleteProfileDataOnOrcid(context, orcid, token, orcidQueue); + } + + } + + private OrcidResponse sendEntityToOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue, + boolean toUpdate) { + + Activity activity = activityFactoryService.createOrcidObject(context, orcidQueue.getEntity()); + + List validationErrors = orcidValidator.validate(activity); + if (CollectionUtils.isNotEmpty(validationErrors)) { + throw new OrcidValidationException(validationErrors); + } + + if (toUpdate) { + activity.setPutCode(getPutCode(orcidQueue)); + return orcidClient.update(token, orcid, activity, orcidQueue.getPutCode()); + } else { + return orcidClient.push(token, orcid, activity); + } + + } + + private OrcidResponse sendProfileDataToOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue) { + + OrcidProfileSectionType recordType = OrcidProfileSectionType.fromString(orcidQueue.getRecordType()); + String signature = orcidQueue.getMetadata(); + Item person = orcidQueue.getEntity(); + + List metadataValues = metadataSignatureGenerator.findBySignature(context, person, signature); + Object orcidObject = profileFactoryService.createOrcidObject(context, metadataValues, recordType); + + List validationErrors = 
orcidValidator.validate(orcidObject); + if (CollectionUtils.isNotEmpty(validationErrors)) { + throw new OrcidValidationException(validationErrors); + } + + return orcidClient.push(token, orcid, orcidObject); + } + + private OrcidResponse deleteProfileDataOnOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue) { + OrcidProfileSectionType recordType = OrcidProfileSectionType.fromString(orcidQueue.getRecordType()); + return orcidClient.deleteByPutCode(token, orcid, orcidQueue.getPutCode(), recordType.getPath()); + } + + private OrcidResponse deleteEntityOnOrcid(Context context, String orcid, String token, OrcidQueue orcidQueue) { + OrcidEntityType recordType = OrcidEntityType.fromEntityType(orcidQueue.getRecordType()); + return orcidClient.deleteByPutCode(token, orcid, orcidQueue.getPutCode(), recordType.getPath()); + } + + private OrcidHistory createHistoryRecordFromGenericError(Context context, OrcidQueue orcidQueue, + OrcidOperation operation, RuntimeException ex) throws SQLException { + return create(context, orcidQueue, ex.getMessage(), operation, 500, null); + } + + private OrcidHistory createHistoryRecordFromOrcidError(Context context, OrcidQueue orcidQueue, + OrcidOperation operation, OrcidClientException ex) throws SQLException { + return create(context, orcidQueue, ex.getMessage(), operation, ex.getStatus(), null); + } + + private OrcidHistory createHistoryRecordFromOrcidResponse(Context context, OrcidQueue orcidQueue, + OrcidOperation operation, OrcidResponse orcidResponse) throws SQLException { + + int status = orcidResponse.getStatus(); + if (operation == OrcidOperation.DELETE && orcidResponse.isNotFoundStatus()) { + status = HttpStatus.SC_NO_CONTENT; + } + + return create(context, orcidQueue, orcidResponse.getContent(), operation, status, orcidResponse.getPutCode()); + } + + private OrcidHistory create(Context context, OrcidQueue orcidQueue, String responseMessage, + OrcidOperation operation, int status, String putCode) throws 
SQLException { + OrcidHistory history = new OrcidHistory(); + history.setEntity(orcidQueue.getEntity()); + history.setProfileItem(orcidQueue.getProfileItem()); + history.setResponseMessage(responseMessage); + history.setStatus(status); + history.setPutCode(putCode); + history.setRecordType(orcidQueue.getRecordType()); + history.setMetadata(orcidQueue.getMetadata()); + history.setOperation(operation); + history.setDescription(orcidQueue.getDescription()); + return orcidHistoryDAO.create(context, history); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return ofNullable(itemService.getMetadataFirstValue(item, new MetadataFieldName(metadataField), Item.ANY)) + .filter(StringUtils::isNotBlank); + } + + private Optional getAccessToken(Context context, Item item) { + return ofNullable(orcidTokenService.findByProfileItem(context, item)) + .map(orcidToken -> orcidToken.getAccessToken()); + } + + private boolean isProfileSectionType(OrcidQueue orcidQueue) { + return OrcidProfileSectionType.isValid(orcidQueue.getRecordType()); + } + + private boolean isEntityType(OrcidQueue orcidQueue) { + return OrcidEntityType.isValidEntityType(orcidQueue.getRecordType()); + } + + private Optional findLastPutCode(List orcidHistoryRecords, Item profileItem) { + return orcidHistoryRecords.stream() + .filter(orcidHistoryRecord -> profileItem.equals(orcidHistoryRecord.getProfileItem())) + .sorted(comparing(OrcidHistory::getTimestamp, nullsFirst(naturalOrder())).reversed()) + .map(history -> history.getPutCode()) + .filter(putCode -> isNotBlank(putCode)) + .findFirst(); + } + + private Long getPutCode(OrcidQueue orcidQueue) { + return isCreatable(orcidQueue.getPutCode()) ? 
Long.valueOf(orcidQueue.getPutCode()) : null; + } + + public OrcidClient getOrcidClient() { + return orcidClient; + } + + public void setOrcidClient(OrcidClient orcidClient) { + this.orcidClient = orcidClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidProfileSectionFactoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidProfileSectionFactoryServiceImpl.java new file mode 100644 index 000000000000..fad5a6657d14 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidProfileSectionFactoryServiceImpl.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.util.stream.Collectors.toMap; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.dspace.orcid.service.OrcidProfileSectionFactoryService; +import org.dspace.profile.OrcidProfileSyncPreference; + +/** + * Implementation of {@link OrcidProfileSectionFactoryService}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidProfileSectionFactoryServiceImpl implements OrcidProfileSectionFactoryService { + + private final Map sectionFactories; + + private OrcidProfileSectionFactoryServiceImpl(List sectionFactories) { + this.sectionFactories = sectionFactories.stream() + .collect(toMap(OrcidProfileSectionFactory::getProfileSectionType, Function.identity())); + } + + @Override + public Optional findBySectionType(OrcidProfileSectionType type) { + return Optional.ofNullable(this.sectionFactories.get(type)); + } + + @Override + public List findByPreferences(List preferences) { + return filterBy(configuration -> preferences.contains(configuration.getSynchronizationPreference())); + } + + @Override + public Object createOrcidObject(Context context, List metadataValues, OrcidProfileSectionType type) { + OrcidProfileSectionFactory profileSectionFactory = findBySectionType(type) + .orElseThrow(() -> new IllegalArgumentException("No ORCID profile section factory configured for " + type)); + return profileSectionFactory.create(context, metadataValues); + } + + private List filterBy(Predicate predicate) { + return sectionFactories.values().stream().filter(predicate).collect(Collectors.toList()); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java new file mode 100644 index 000000000000..d3300fea6606 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidQueueServiceImpl.java @@ -0,0 +1,242 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import java.sql.SQLException; +import java.util.List; +import java.util.Optional; +import 
java.util.UUID; +import java.util.stream.Collectors; + +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.Relationship; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.dao.OrcidQueueDAO; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidQueueService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueServiceImpl implements OrcidQueueService { + + @Autowired + private OrcidQueueDAO orcidQueueDAO; + + @Autowired + private OrcidHistoryService orcidHistoryService; + + @Autowired + private ItemService itemService; + + @Autowired + private RelationshipService relationshipService; + + @Override + public List findByProfileItemId(Context context, UUID profileItemId) throws SQLException { + return orcidQueueDAO.findByProfileItemId(context, profileItemId, -1, 0); + } + + @Override + public List findByProfileItemId(Context context, UUID profileItemId, Integer limit, Integer offset) + throws SQLException { + return orcidQueueDAO.findByProfileItemId(context, profileItemId, limit, offset); + } + + @Override + public List findByProfileItemAndEntity(Context context, Item profileItem, Item entity) + throws SQLException { + return orcidQueueDAO.findByProfileItemAndEntity(context, profileItem, entity); + } + + @Override + public List findByProfileItemOrEntity(Context context, Item item) throws SQLException { + return orcidQueueDAO.findByProfileItemOrEntity(context, item); + } + + @Override + public long countByProfileItemId(Context 
context, UUID profileItemId) throws SQLException { + return orcidQueueDAO.countByProfileItemId(context, profileItemId); + } + + @Override + public List findAll(Context context) throws SQLException { + return orcidQueueDAO.findAll(context, OrcidQueue.class); + } + + @Override + public OrcidQueue create(Context context, Item profileItem, Item entity) throws SQLException { + Optional putCode = orcidHistoryService.findLastPutCode(context, profileItem, entity); + if (putCode.isPresent()) { + return createEntityUpdateRecord(context, profileItem, entity, putCode.get()); + } else { + return createEntityInsertionRecord(context, profileItem, entity); + } + } + + @Override + public OrcidQueue createEntityInsertionRecord(Context context, Item profileItem, Item entity) throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setEntity(entity); + orcidQueue.setRecordType(itemService.getEntityTypeLabel(entity)); + orcidQueue.setProfileItem(profileItem); + orcidQueue.setDescription(getMetadataValue(entity, "dc.title")); + orcidQueue.setOperation(OrcidOperation.INSERT); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createEntityUpdateRecord(Context context, Item profileItem, Item entity, String putCode) + throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setProfileItem(profileItem); + orcidQueue.setEntity(entity); + orcidQueue.setPutCode(putCode); + orcidQueue.setRecordType(itemService.getEntityTypeLabel(entity)); + orcidQueue.setDescription(getMetadataValue(entity, "dc.title")); + orcidQueue.setOperation(OrcidOperation.UPDATE); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createEntityDeletionRecord(Context context, Item profileItem, String description, String type, + String putCode) + throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setRecordType(type); + orcidQueue.setProfileItem(profileItem); + 
orcidQueue.setPutCode(putCode); + orcidQueue.setDescription(description); + orcidQueue.setOperation(OrcidOperation.DELETE); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createProfileInsertionRecord(Context context, Item profile, String description, String recordType, + String metadata) throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setEntity(profile); + orcidQueue.setRecordType(recordType); + orcidQueue.setProfileItem(profile); + orcidQueue.setDescription(description); + orcidQueue.setMetadata(metadata); + orcidQueue.setOperation(OrcidOperation.INSERT); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public OrcidQueue createProfileDeletionRecord(Context context, Item profile, String description, String recordType, + String metadata, String putCode) throws SQLException { + OrcidQueue orcidQueue = new OrcidQueue(); + orcidQueue.setEntity(profile); + orcidQueue.setRecordType(recordType); + orcidQueue.setProfileItem(profile); + orcidQueue.setDescription(description); + orcidQueue.setPutCode(putCode); + orcidQueue.setMetadata(metadata); + orcidQueue.setOperation(OrcidOperation.DELETE); + return orcidQueueDAO.create(context, orcidQueue); + } + + @Override + public void deleteById(Context context, Integer id) throws SQLException { + OrcidQueue orcidQueue = orcidQueueDAO.findByID(context, OrcidQueue.class, id); + if (orcidQueue != null) { + delete(context, orcidQueue); + } + } + + @Override + public List findByAttemptsLessThan(Context context, int attempts) throws SQLException { + return orcidQueueDAO.findByAttemptsLessThan(context, attempts); + } + + @Override + public void delete(Context context, OrcidQueue orcidQueue) throws SQLException { + orcidQueueDAO.delete(context, orcidQueue); + } + + @Override + public void deleteByEntityAndRecordType(Context context, Item entity, String recordType) throws SQLException { + List records = 
orcidQueueDAO.findByEntityAndRecordType(context, entity, recordType); + for (OrcidQueue record : records) { + orcidQueueDAO.delete(context, record); + } + } + + @Override + public void deleteByProfileItemAndRecordType(Context context, Item profileItem, String recordType) + throws SQLException { + List records = orcidQueueDAO.findByProfileItemAndRecordType(context, profileItem, recordType); + for (OrcidQueue record : records) { + orcidQueueDAO.delete(context, record); + } + } + + @Override + public OrcidQueue find(Context context, int id) throws SQLException { + return orcidQueueDAO.findByID(context, OrcidQueue.class, id); + } + + @Override + public void update(Context context, OrcidQueue orcidQueue) throws SQLException { + orcidQueueDAO.save(context, orcidQueue); + } + + @Override + public void recalculateOrcidQueue(Context context, Item profileItem, OrcidEntityType orcidEntityType, + OrcidEntitySyncPreference preference) throws SQLException { + + String entityType = orcidEntityType.getEntityType(); + if (preference == OrcidEntitySyncPreference.DISABLED) { + deleteByProfileItemAndRecordType(context, profileItem, entityType); + } else { + List entities = findAllEntitiesLinkableWith(context, profileItem, entityType); + for (Item entity : entities) { + create(context, profileItem, entity); + } + } + + } + + private List findAllEntitiesLinkableWith(Context context, Item profile, String entityType) { + + return findRelationshipsByItem(context, profile).stream() + .map(relationship -> getRelatedItem(relationship, profile)) + .filter(item -> entityType.equals(itemService.getEntityTypeLabel(item))) + .collect(Collectors.toList()); + + } + + private List findRelationshipsByItem(Context context, Item item) { + try { + return relationshipService.findByItem(context, item); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + private Item getRelatedItem(Relationship relationship, Item item) { + return relationship.getLeftItem().equals(item) ? 
relationship.getRightItem() : relationship.getLeftItem(); + } + + private String getMetadataValue(Item item, String metadatafield) { + return itemService.getMetadataFirstValue(item, new MetadataFieldName(metadatafield), Item.ANY); + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java new file mode 100644 index 000000000000..97d832d3de82 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidSynchronizationServiceImpl.java @@ -0,0 +1,331 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.time.LocalDateTime.now; +import static java.time.format.DateTimeFormatter.ISO_DATE_TIME; +import static java.util.List.of; +import static java.util.Optional.ofNullable; +import static org.apache.commons.collections.CollectionUtils.isEmpty; +import static org.apache.commons.lang3.EnumUtils.isValidEnum; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.content.Item.ANY; +import static org.dspace.profile.OrcidEntitySyncPreference.DISABLED; + +import java.sql.SQLException; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.codec.binary.StringUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import 
org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.model.OrcidEntityType; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.dspace.profile.OrcidProfileDisconnectionMode; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.profile.OrcidSynchronizationMode; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidSynchronizationService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationService { + + @Autowired + private ItemService itemService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private SearchService searchService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Override + public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException { + + EPerson ePerson = ePersonService.findByProfileItem(context, profile); + if (ePerson == null) { + throw new IllegalArgumentException( + "The given profile item is not related to any eperson. 
Item id: " + profile.getID()); + } + + String orcid = token.getOrcid(); + String accessToken = token.getAccessToken(); + String[] scopes = token.getScopeAsArray(); + + itemService.setMetadataSingleValue(context, profile, "person", "identifier", "orcid", null, orcid); + itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY); + for (String scope : scopes) { + itemService.addMetadata(context, profile, "dspace", "orcid", "scope", null, scope); + } + + if (isBlank(itemService.getMetadataFirstValue(profile, "dspace", "orcid", "authenticated", Item.ANY))) { + String currentDate = ISO_DATE_TIME.format(now()); + itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "authenticated", null, currentDate); + } + + setAccessToken(context, profile, ePerson, accessToken); + + EPerson ePersonByOrcid = ePersonService.findByNetid(context, orcid); + if (ePersonByOrcid == null && isBlank(ePerson.getNetid())) { + ePerson.setNetid(orcid); + updateEPerson(context, ePerson); + } + + updateItem(context, profile); + + } + + @Override + public void unlinkProfile(Context context, Item profile) throws SQLException { + + itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY); + itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY); + itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY); + + orcidTokenService.deleteByProfileItem(context, profile); + + updateItem(context, profile); + + } + + @Override + public boolean setEntityPreference(Context context, Item profile, OrcidEntityType type, + OrcidEntitySyncPreference value) throws SQLException { + String metadataQualifier = "sync-" + type.name().toLowerCase() + "s"; + return updatePreferenceForSynchronizingWithOrcid(context, profile, metadataQualifier, of(value.name())); + } + + @Override + public boolean setProfilePreference(Context context, Item profile, List values) + throws SQLException { + + List 
valuesAsString = values.stream() + .map(OrcidProfileSyncPreference::name) + .collect(Collectors.toList()); + + return updatePreferenceForSynchronizingWithOrcid(context, profile, "sync-profile", valuesAsString); + + } + + @Override + public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value) + throws SQLException { + + if (!isLinkedToOrcid(context, profile)) { + throw new IllegalArgumentException("The given profile cannot be configured for the ORCID " + + "synchronization because it is not linked to any ORCID account: " + + profile.getID()); + } + + String newValue = value.name(); + String oldValue = itemService.getMetadataFirstValue(profile, "dspace", "orcid", "sync-mode", Item.ANY); + + if (StringUtils.equals(oldValue, newValue)) { + return false; + } else { + itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "sync-mode", null, value.name()); + return true; + } + + } + + @Override + public boolean isSynchronizationAllowed(Item profile, Item item) { + + if (isOrcidSynchronizationDisabled()) { + return false; + } + + String entityType = itemService.getEntityTypeLabel(item); + if (entityType == null) { + return false; + } + + if (OrcidEntityType.isValidEntityType(entityType)) { + return getEntityPreference(profile, OrcidEntityType.fromEntityType(entityType)) + .filter(pref -> pref != DISABLED) + .isPresent(); + } + + if (entityType.equals(researcherProfileService.getProfileType())) { + return profile.equals(item) && !isEmpty(getProfilePreferences(profile)); + } + + return false; + + } + + @Override + public Optional getSynchronizationMode(Item item) { + return getMetadataValue(item, "dspace.orcid.sync-mode") + .map(metadataValue -> metadataValue.getValue()) + .filter(value -> isValidEnum(OrcidSynchronizationMode.class, value)) + .map(value -> OrcidSynchronizationMode.valueOf(value)); + } + + @Override + public Optional getEntityPreference(Item item, OrcidEntityType entityType) { + return 
getMetadataValue(item, "dspace.orcid.sync-" + entityType.name().toLowerCase() + "s") + .map(metadataValue -> metadataValue.getValue()) + .filter(value -> isValidEnum(OrcidEntitySyncPreference.class, value)) + .map(value -> OrcidEntitySyncPreference.valueOf(value)); + } + + @Override + public List getProfilePreferences(Item item) { + return getMetadataValues(item, "dspace.orcid.sync-profile") + .map(MetadataValue::getValue) + .filter(value -> isValidEnum(OrcidProfileSyncPreference.class, value)) + .map(value -> OrcidProfileSyncPreference.valueOf(value)) + .collect(Collectors.toList()); + } + + @Override + public boolean isLinkedToOrcid(Context context, Item item) { + return getOrcidAccessToken(context, item).isPresent() && getOrcid(item).isPresent(); + } + + @Override + public OrcidProfileDisconnectionMode getDisconnectionMode() { + String value = configurationService.getProperty("orcid.disconnection.allowed-users"); + if (!OrcidProfileDisconnectionMode.isValid(value)) { + return OrcidProfileDisconnectionMode.DISABLED; + } + return OrcidProfileDisconnectionMode.fromString(value); + } + + private void setAccessToken(Context context, Item profile, EPerson ePerson, String accessToken) { + OrcidToken orcidToken = orcidTokenService.findByEPerson(context, ePerson); + if (orcidToken == null) { + orcidTokenService.create(context, ePerson, profile, accessToken); + } else { + orcidToken.setProfileItem(profile); + orcidToken.setAccessToken(accessToken); + } + } + + private boolean updatePreferenceForSynchronizingWithOrcid(Context context, Item profile, + String metadataQualifier, + List values) throws SQLException { + + if (!isLinkedToOrcid(context, profile)) { + throw new IllegalArgumentException("The given profile cannot be configured for the ORCID " + + "synchronization because it is not linked to any ORCID account: " + + profile.getID()); + } + + List oldValues = itemService.getMetadata(profile, "dspace", "orcid", metadataQualifier, ANY).stream() + .map(metadataValue -> 
metadataValue.getValue()) + .collect(Collectors.toList()); + + if (containsSameValues(oldValues, values)) { + return false; + } + + itemService.clearMetadata(context, profile, "dspace", "orcid", metadataQualifier, ANY); + for (String value : values) { + itemService.addMetadata(context, profile, "dspace", "orcid", metadataQualifier, null, value); + } + + return true; + + } + + private boolean containsSameValues(List firstList, List secondList) { + return new HashSet<>(firstList).equals(new HashSet<>(secondList)); + } + + private Optional getOrcidAccessToken(Context context, Item item) { + return ofNullable(orcidTokenService.findByProfileItem(context, item)) + .map(orcidToken -> orcidToken.getAccessToken()); + } + + public Optional getOrcid(Item item) { + return getMetadataValue(item, "person.identifier.orcid") + .map(metadataValue -> metadataValue.getValue()); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return getMetadataValues(item, metadataField).findFirst(); + } + + private Stream getMetadataValues(Item item, String metadataField) { + return item.getMetadata().stream() + .filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.'))); + } + + + private boolean isOrcidSynchronizationDisabled() { + return !configurationService.getBooleanProperty("orcid.synchronization-enabled", true); + } + + private void updateItem(Context context, Item item) throws SQLException { + try { + context.turnOffAuthorisationSystem(); + itemService.update(context, item); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } finally { + context.restoreAuthSystemState(); + } + } + + private void updateEPerson(Context context, EPerson ePerson) throws SQLException { + try { + ePersonService.update(context, ePerson); + } catch (AuthorizeException e) { + throw new RuntimeException(e); + } + } + + @Override + public List findProfilesByOrcid(Context context, String orcid) { + DiscoverQuery discoverQuery = new 
DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.addFilterQueries("search.entitytype:" + researcherProfileService.getProfileType()); + discoverQuery.addFilterQueries("person.identifier.orcid:" + orcid); + try { + return searchService.search(context, discoverQuery).getIndexableObjects().stream() + .map(object -> ((IndexableItem) object).getIndexedObject()) + .collect(Collectors.toList()); + } catch (SearchServiceException ex) { + throw new RuntimeException(ex); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidTokenServiceImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidTokenServiceImpl.java new file mode 100644 index 000000000000..bf10ea981c1b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/OrcidTokenServiceImpl.java @@ -0,0 +1,99 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.dao.OrcidTokenDAO; +import org.dspace.orcid.service.OrcidTokenService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OrcidTokenService}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidTokenServiceImpl implements OrcidTokenService { + + @Autowired + private OrcidTokenDAO orcidTokenDAO; + + @Override + public OrcidToken create(Context context, EPerson ePerson, String accessToken) { + return create(context, ePerson, null, accessToken); + } + + @Override + public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken) { + OrcidToken orcidToken = new OrcidToken(); + orcidToken.setAccessToken(accessToken); + orcidToken.setEPerson(ePerson); + orcidToken.setProfileItem(profileItem); + try { + return orcidTokenDAO.create(context, orcidToken); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public OrcidToken findByEPerson(Context context, EPerson ePerson) { + return orcidTokenDAO.findByEPerson(context, ePerson); + } + + @Override + public OrcidToken findByProfileItem(Context context, Item profileItem) { + return orcidTokenDAO.findByProfileItem(context, profileItem); + } + + @Override + public void delete(Context context, OrcidToken orcidToken) { + try { + orcidTokenDAO.delete(context, orcidToken); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public void deleteAll(Context context) { + try { + + List tokens = orcidTokenDAO.findAll(context, OrcidToken.class); + for (OrcidToken token : tokens) { + delete(context, token); + } + + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public void deleteByEPerson(Context context, EPerson ePerson) { + OrcidToken orcidToken = findByEPerson(context, ePerson); + if (orcidToken != null) { + delete(context, orcidToken); + } + } + + @Override + public void deleteByProfileItem(Context context, Item profileItem) { + OrcidToken orcidToken = findByProfileItem(context, profileItem); + if (orcidToken != null) { + delete(context, orcidToken); + } + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/orcid/service/impl/PlainMetadataSignatureGeneratorImpl.java b/dspace-api/src/main/java/org/dspace/orcid/service/impl/PlainMetadataSignatureGeneratorImpl.java new file mode 100644 index 000000000000..4888462ce454 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/orcid/service/impl/PlainMetadataSignatureGeneratorImpl.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service.impl; + +import static java.util.Comparator.comparing; + +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.core.Context; +import org.dspace.orcid.service.MetadataSignatureGenerator; + +/** + * Implementation of {@link MetadataSignatureGenerator} that composes a + * signature made up of a section for each metadata value, divided by the + * character SIGNATURE_SECTIONS_SEPARATOR.
    + * Each section is composed of the metadata field, the metadata value and, if + * present, the authority, divided by the character METADATA_SECTIONS_SEPARATOR. + *
    + * The presence of the metadata field allows to have different signatures for + * metadata with the same values but referring to different fields, while the + * authority allows to distinguish metadata that refer to different entities, + * even if they have the same value. Finally, the various sections of the + * signature are sorted by metadata field so that the order of the input + * metadata values does not affect the signature. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class PlainMetadataSignatureGeneratorImpl implements MetadataSignatureGenerator { + + private static final String SIGNATURE_SECTIONS_SEPARATOR = "§§"; + private static final String METADATA_SECTIONS_SEPARATOR = "::"; + + @Override + public String generate(Context context, List metadataValues) { + return metadataValues.stream() + .sorted(comparing(metadataValue -> metadataValue.getMetadataField().getID())) + .map(this::composeSignatureSection) + .collect(Collectors.joining(SIGNATURE_SECTIONS_SEPARATOR)); + } + + @Override + public List findBySignature(Context context, Item item, String signature) { + return getSignatureSections(signature) + .map(signatureSection -> findFirstBySignatureSection(context, item, signatureSection)) + .flatMap(metadataValue -> metadataValue.stream()) + .collect(Collectors.toList()); + } + + private String composeSignatureSection(MetadataValue metadataValue) { + String fieldId = getField(metadataValue); + String metadataValueSignature = fieldId + METADATA_SECTIONS_SEPARATOR + getValue(metadataValue); + if (StringUtils.isNotBlank(metadataValue.getAuthority())) { + return metadataValueSignature + METADATA_SECTIONS_SEPARATOR + metadataValue.getAuthority(); + } else { + return metadataValueSignature; + } + } + + private Optional findFirstBySignatureSection(Context context, Item item, String signatureSection) { + return item.getMetadata().stream() + .filter(metadataValue -> matchSignature(context, metadataValue, signatureSection)) 
+ .findFirst(); + } + + private boolean matchSignature(Context context, MetadataValue metadataValue, String signatureSection) { + return generate(context, List.of(metadataValue)).equals(signatureSection); + } + + private Stream getSignatureSections(String signature) { + return Arrays.stream(StringUtils.split(signature, SIGNATURE_SECTIONS_SEPARATOR)); + } + + private String getField(MetadataValue metadataValue) { + return metadataValue.getMetadataField().toString('.'); + } + + private String getValue(MetadataValue metadataValue) { + return metadataValue.getValue() != null ? metadataValue.getValue() : ""; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactory.java b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactory.java new file mode 100644 index 000000000000..81cebb84a1d1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactory.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.passwordvalidation.factory; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the passwordvalidation package, + * use PasswordValidationFactory.getInstance() to retrieve an implementation. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public abstract class PasswordValidationFactory { + + public abstract PasswordValidatorService getPasswordValidationService(); + + public static PasswordValidationFactory getInstance() { + return DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName("validationPasswordFactory", PasswordValidationFactory.class); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactoryImpl.java b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactoryImpl.java new file mode 100644 index 000000000000..a73c7f686850 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/passwordvalidation/factory/PasswordValidationFactoryImpl.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.passwordvalidation.factory; + +import org.dspace.authorize.service.PasswordValidatorService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for the PasswordValidation package, + * use PasswordValidationFactory.getInstance() to retrieve an implementation. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class PasswordValidationFactoryImpl extends PasswordValidationFactory { + + @Autowired(required = true) + private PasswordValidatorService PasswordValidatorService; + + @Override + public PasswordValidatorService getPasswordValidationService() { + return PasswordValidatorService; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidEntitySyncPreference.java b/dspace-api/src/main/java/org/dspace/profile/OrcidEntitySyncPreference.java new file mode 100644 index 000000000000..74efc57e3867 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidEntitySyncPreference.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +/** + * Enum that model the allowed values to configure the ORCID synchronization + * preferences. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidEntitySyncPreference { + + /** + * Preference to be set to disable the synchronization with ORCID of the + * specific entity. + */ + DISABLED, + + /** + * Preference to be set to enable the synchronization with ORCID of all items + * relating to the specific entity. 
+ */ + ALL +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidMetadataCopyingAction.java b/dspace-api/src/main/java/org/dspace/profile/OrcidMetadataCopyingAction.java new file mode 100644 index 000000000000..36abea9ddb63 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidMetadataCopyingAction.java @@ -0,0 +1,97 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static java.time.LocalDateTime.now; +import static java.time.format.DateTimeFormatter.ISO_DATE_TIME; +import static org.apache.commons.collections.CollectionUtils.isNotEmpty; +import static org.dspace.content.Item.ANY; + +import java.sql.SQLException; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.collections.CollectionUtils; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.profile.service.AfterResearcherProfileCreationAction; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.Ordered; +import org.springframework.core.annotation.Order; + +/** + * Implementation of {@link AfterResearcherProfileCreationAction} that copy the + * ORCID metadata, if any, from the owner to the researcher profile item. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Order(Ordered.HIGHEST_PRECEDENCE) +public class OrcidMetadataCopyingAction implements AfterResearcherProfileCreationAction { + + @Autowired + private ItemService itemService; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private OrcidTokenService orcidTokenService; + + @Override + public void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException { + + Item item = researcherProfile.getItem(); + + copyMetadataValues(context, owner, "eperson.orcid", item, "person.identifier.orcid"); + copyMetadataValues(context, owner, "eperson.orcid.scope", item, "dspace.orcid.scope"); + + OrcidToken orcidToken = orcidTokenService.findByEPerson(context, owner); + if (orcidToken != null) { + orcidToken.setProfileItem(item); + } + + if (isLinkedToOrcid(owner, orcidToken)) { + String currentDate = ISO_DATE_TIME.format(now()); + itemService.setMetadataSingleValue(context, item, "dspace", "orcid", "authenticated", null, currentDate); + } + + } + + private void copyMetadataValues(Context context, EPerson ePerson, String ePersonMetadataField, Item item, + String itemMetadataField) throws SQLException { + + List values = getMetadataValues(ePerson, ePersonMetadataField); + if (CollectionUtils.isEmpty(values)) { + return; + } + + MetadataFieldName metadata = new MetadataFieldName(itemMetadataField); + itemService.clearMetadata(context, item, metadata.schema, metadata.element, metadata.qualifier, ANY); + itemService.addMetadata(context, item, metadata.schema, metadata.element, metadata.qualifier, null, values); + + } + + private boolean isLinkedToOrcid(EPerson ePerson, OrcidToken orcidToken) { + return isNotEmpty(getMetadataValues(ePerson, "eperson.orcid")) && orcidToken != null; + } + + private List getMetadataValues(EPerson ePerson, String metadataField) { + return ePersonService.getMetadataByMetadataString(ePerson, metadataField).stream() + 
.map(MetadataValue::getValue) + .collect(Collectors.toList()); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidProfileDisconnectionMode.java b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileDisconnectionMode.java new file mode 100644 index 000000000000..22b13f047caa --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileDisconnectionMode.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static org.apache.commons.lang3.EnumUtils.isValidEnum; + +/** + * Enum that models all the available values of the property that which + * determines which users can disconnect a profile from an ORCID account. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidProfileDisconnectionMode { + + /** + * The disconnection is disabled. + */ + DISABLED, + + /** + * Only the profile's owner can disconnect that profile from ORCID. + */ + ONLY_OWNER, + + /** + * Only the admins can disconnect profiles from ORCID. + */ + ONLY_ADMIN, + + /** + * Only the admin or the profile's owner can disconnect that profile from ORCID. + */ + ADMIN_AND_OWNER; + + public static boolean isValid(String mode) { + return mode != null ? isValidEnum(OrcidProfileDisconnectionMode.class, mode.toUpperCase()) : false; + } + + public static OrcidProfileDisconnectionMode fromString(String mode) { + return isValid(mode) ? 
OrcidProfileDisconnectionMode.valueOf(mode.toUpperCase()) : null; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidProfileSyncPreference.java b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileSyncPreference.java new file mode 100644 index 000000000000..a867694490eb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidProfileSyncPreference.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +/** + * Enum that models the allowed values to configure the ORCID synchronization + * preferences for the user's profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidProfileSyncPreference { + + /** + * Data relating to the name, country and keywords of the ORCID profile. + */ + BIOGRAPHICAL, + + /** + * Data relating to external identifiers and researcher urls of the ORCID + * profile. + */ + IDENTIFIERS; +} diff --git a/dspace-api/src/main/java/org/dspace/profile/OrcidSynchronizationMode.java b/dspace-api/src/main/java/org/dspace/profile/OrcidSynchronizationMode.java new file mode 100644 index 000000000000..8bc822261bab --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/OrcidSynchronizationMode.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +/** + * Enum that models the allowed values to configure the ORCID synchronization + * mode. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public enum OrcidSynchronizationMode { + + /** + * Mode in which the user can manually decide when to synchronize data with + * ORCID. + */ + MANUAL, + + /** + * Mode in which synchronizations with ORCID occur through an automatic process. + */ + BATCH; +} diff --git a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfile.java b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfile.java new file mode 100644 index 000000000000..72e7dc800868 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfile.java @@ -0,0 +1,88 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static org.dspace.core.Constants.READ; +import static org.dspace.eperson.Group.ANONYMOUS; + +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.util.UUIDUtils; +import org.springframework.util.Assert; + +/** + * Object representing a Researcher Profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfile { + + private final Item item; + + private final MetadataValue dspaceObjectOwner; + + /** + * Create a new ResearcherProfile object from the given item. 
+ * + * @param item the profile item + * @throws IllegalArgumentException if the given item has not a dspace.object.owner + * metadata with a valid authority + */ + public ResearcherProfile(Item item) { + Assert.notNull(item, "A researcher profile requires an item"); + this.item = item; + this.dspaceObjectOwner = getDspaceObjectOwnerMetadata(item); + } + + public UUID getId() { + return UUIDUtils.fromString(dspaceObjectOwner.getAuthority()); + } + + /** + * A profile is considered visible if accessible by anonymous users. This method + * returns true if the given item has a READ policy related to ANONYMOUS group, + * false otherwise. + */ + public boolean isVisible() { + return item.getResourcePolicies().stream() + .filter(policy -> policy.getGroup() != null) + .anyMatch(policy -> READ == policy.getAction() && ANONYMOUS.equals(policy.getGroup().getName())); + } + + public Item getItem() { + return item; + } + + public Optional getOrcid() { + return getMetadataValue(item, "person.identifier.orcid") + .map(metadataValue -> metadataValue.getValue()); + } + + private MetadataValue getDspaceObjectOwnerMetadata(Item item) { + return getMetadataValue(item, "dspace.object.owner") + .filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null) + .orElseThrow( + () -> new IllegalArgumentException("A profile item must have a valid dspace.object.owner metadata") + ); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return getMetadataValues(item, metadataField).findFirst(); + } + + private Stream getMetadataValues(Item item, String metadataField) { + return item.getMetadata().stream() + .filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.'))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java new file mode 100644 index 000000000000..80bbd68fd19d --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/profile/ResearcherProfileServiceImpl.java @@ -0,0 +1,388 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile; + +import static java.util.Optional.empty; +import static java.util.Optional.of; +import static java.util.Optional.ofNullable; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.WRITE; +import static org.dspace.eperson.Group.ANONYMOUS; + +import java.io.IOException; +import java.net.URI; +import java.sql.SQLException; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import javax.annotation.PostConstruct; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.dspace.app.exception.ResourceAlreadyExistsException; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import 
org.dspace.eperson.service.GroupService; +import org.dspace.orcid.service.OrcidSynchronizationService; +import org.dspace.profile.service.AfterResearcherProfileCreationAction; +import org.dspace.profile.service.ResearcherProfileService; +import org.dspace.services.ConfigurationService; +import org.dspace.util.UUIDUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link ResearcherProfileService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfileServiceImpl implements ResearcherProfileService { + + private static Logger log = LoggerFactory.getLogger(ResearcherProfileServiceImpl.class); + + @Autowired + private ItemService itemService; + + @Autowired + private WorkspaceItemService workspaceItemService; + + @Autowired + private InstallItemService installItemService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private CollectionService collectionService; + + @Autowired + private SearchService searchService; + + @Autowired + private GroupService groupService; + + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private OrcidSynchronizationService orcidSynchronizationService; + + @Autowired(required = false) + private List afterCreationActions; + + @PostConstruct + public void postConstruct() { + + if (afterCreationActions == null) { + afterCreationActions = Collections.emptyList(); + } + + } + + @Override + public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException { + Assert.notNull(id, "An id must be provided to find a researcher profile"); + + Item profileItem = findResearcherProfileItemById(context, id); + if (profileItem == null) { + return null; + } + + return new ResearcherProfile(profileItem); + } + + @Override + public ResearcherProfile 
createAndReturn(Context context, EPerson ePerson) + throws AuthorizeException, SQLException, SearchServiceException { + + Item profileItem = findResearcherProfileItemById(context, ePerson.getID()); + if (profileItem != null) { + throw new ResourceAlreadyExistsException("A profile is already linked to the provided User"); + } + + Collection collection = findProfileCollection(context) + .orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles")); + + context.turnOffAuthorisationSystem(); + Item item = createProfileItem(context, ePerson, collection); + context.restoreAuthSystemState(); + + ResearcherProfile researcherProfile = new ResearcherProfile(item); + + for (AfterResearcherProfileCreationAction afterCreationAction : afterCreationActions) { + afterCreationAction.perform(context, researcherProfile, ePerson); + } + + return researcherProfile; + } + + @Override + public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException { + Assert.notNull(id, "An id must be provided to find a researcher profile"); + + Item profileItem = findResearcherProfileItemById(context, id); + if (profileItem == null) { + return; + } + + if (isHardDeleteEnabled()) { + deleteItem(context, profileItem); + } else { + removeOwnerMetadata(context, profileItem); + orcidSynchronizationService.unlinkProfile(context, profileItem); + } + + } + + @Override + public void changeVisibility(Context context, ResearcherProfile profile, boolean visible) + throws AuthorizeException, SQLException { + + if (profile.isVisible() == visible) { + return; + } + + Item item = profile.getItem(); + Group anonymous = groupService.findByName(context, ANONYMOUS); + + if (visible) { + authorizeService.addPolicy(context, item, READ, anonymous); + } else { + authorizeService.removeGroupPolicies(context, item, anonymous); + } + + } + + @Override + public ResearcherProfile claim(Context context, EPerson ePerson, URI uri) + throws SQLException, AuthorizeException, 
SearchServiceException { + + Item profileItem = findResearcherProfileItemById(context, ePerson.getID()); + if (profileItem != null) { + throw new ResourceAlreadyExistsException("A profile is already linked to the provided User"); + } + + Item item = findItemByURI(context, uri) + .orElseThrow(() -> new IllegalArgumentException("No item found by URI " + uri)); + + if (!item.isArchived() || item.isWithdrawn()) { + throw new IllegalArgumentException( + "Only archived items can be claimed to create a researcher profile. Item ID: " + item.getID()); + } + + if (!hasProfileType(item)) { + throw new IllegalArgumentException("The provided item has not a profile type. Item ID: " + item.getID()); + } + + if (haveDifferentEmail(item, ePerson)) { + throw new IllegalArgumentException("The provided item is not claimable because it has a different email " + + "than the given user's email. Item ID: " + item.getID()); + } + + String existingOwner = itemService.getMetadataFirstValue(item, "dspace", "object", "owner", Item.ANY); + + if (StringUtils.isNotBlank(existingOwner)) { + throw new IllegalArgumentException("Item with provided uri has already an owner - ID: " + existingOwner); + } + + context.turnOffAuthorisationSystem(); + itemService.addMetadata(context, item, "dspace", "object", "owner", null, + ePerson.getName(), ePerson.getID().toString(), CF_ACCEPTED); + context.restoreAuthSystemState(); + + return new ResearcherProfile(item); + } + + @Override + public boolean hasProfileType(Item item) { + String profileType = getProfileType(); + if (StringUtils.isBlank(profileType)) { + return false; + } + return profileType.equals(itemService.getEntityTypeLabel(item)); + } + + @Override + public String getProfileType() { + return configurationService.getProperty("researcher-profile.entity-type", "Person"); + } + + private Optional findItemByURI(final Context context, final URI uri) throws SQLException { + String path = uri.getPath(); + UUID uuid = 
UUIDUtils.fromString(path.substring(path.lastIndexOf("/") + 1)); + return ofNullable(itemService.find(context, uuid)); + } + + /** + * Search for a profile item owned by an eperson with the given id. + */ + private Item findResearcherProfileItemById(Context context, UUID id) throws SQLException, AuthorizeException { + + String profileType = getProfileType(); + + Iterator<Item> items = itemService.findByAuthorityValue(context, "dspace", "object", "owner", id.toString()); + while (items.hasNext()) { + Item item = items.next(); + String entityType = itemService.getEntityTypeLabel(item); + if (profileType.equals(entityType)) { + return item; + } + } + + return null; + } + + /** + * Returns a Profile collection based on a configuration or searching for a + * collection of researcher profile type. + */ + private Optional<Collection> findProfileCollection(Context context) throws SQLException, SearchServiceException { + return findConfiguredProfileCollection(context) + .or(() -> findFirstCollectionByProfileEntityType(context)); + } + + /** + * Create a new profile item for the given ePerson in the provided collection. 
+ */ + private Item createProfileItem(Context context, EPerson ePerson, Collection collection) + throws AuthorizeException, SQLException { + + String id = ePerson.getID().toString(); + String fullName = ePerson.getFullName(); + + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, true); + Item item = workspaceItem.getItem(); + itemService.addMetadata(context, item, "dc", "title", null, null, fullName); + itemService.addMetadata(context, item, "person", "email", null, null, ePerson.getEmail()); + itemService.addMetadata(context, item, "dspace", "object", "owner", null, fullName, id, CF_ACCEPTED); + + item = installItemService.installItem(context, workspaceItem); + + if (isNewProfileNotVisibleByDefault()) { + Group anonymous = groupService.findByName(context, ANONYMOUS); + authorizeService.removeGroupPolicies(context, item, anonymous); + } + + authorizeService.addPolicy(context, item, READ, ePerson); + authorizeService.addPolicy(context, item, WRITE, ePerson); + + return reloadItem(context, item); + } + + private Optional findConfiguredProfileCollection(Context context) throws SQLException { + UUID uuid = UUIDUtils.fromString(configurationService.getProperty("researcher-profile.collection.uuid")); + if (uuid == null) { + return Optional.empty(); + } + + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + return Optional.empty(); + } + + if (isNotProfileCollection(collection)) { + log.warn("The configured researcher-profile.collection.uuid " + + "has an invalid entity type, expected " + getProfileType()); + return Optional.empty(); + } + + return of(collection); + } + + @SuppressWarnings("rawtypes") + private Optional findFirstCollectionByProfileEntityType(Context context) { + + String profileType = getProfileType(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + 
profileType); + + DiscoverResult discoverResult = search(context, discoverQuery); + List indexableObjects = discoverResult.getIndexableObjects(); + + if (CollectionUtils.isEmpty(indexableObjects)) { + return empty(); + } + + return ofNullable((Collection) indexableObjects.get(0).getIndexedObject()); + } + + private boolean isHardDeleteEnabled() { + return configurationService.getBooleanProperty("researcher-profile.hard-delete.enabled"); + } + + private boolean isNewProfileNotVisibleByDefault() { + return !configurationService.getBooleanProperty("researcher-profile.set-new-profile-visible"); + } + + private boolean isNotProfileCollection(Collection collection) { + String entityType = collectionService.getMetadataFirstValue(collection, "dspace", "entity", "type", Item.ANY); + return entityType == null || !entityType.equals(getProfileType()); + } + + private boolean haveDifferentEmail(Item item, EPerson currentUser) { + return itemService.getMetadataByMetadataString(item, "person.email").stream() + .map(MetadataValue::getValue) + .filter(StringUtils::isNotBlank) + .noneMatch(email -> email.equalsIgnoreCase(currentUser.getEmail())); + } + + private void removeOwnerMetadata(Context context, Item profileItem) throws SQLException { + List metadata = itemService.getMetadata(profileItem, "dspace", "object", "owner", Item.ANY); + itemService.removeMetadataValues(context, profileItem, metadata); + } + + private Item reloadItem(Context context, Item item) throws SQLException { + context.uncacheEntity(item); + return context.reloadEntity(item); + } + + private void deleteItem(Context context, Item profileItem) throws SQLException, AuthorizeException { + try { + context.turnOffAuthorisationSystem(); + itemService.delete(context, profileItem); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + context.restoreAuthSystemState(); + } + } + + private DiscoverResult search(Context context, DiscoverQuery discoverQuery) { + try { + return 
searchService.search(context, discoverQuery); + } catch (SearchServiceException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/profile/service/AfterResearcherProfileCreationAction.java b/dspace-api/src/main/java/org/dspace/profile/service/AfterResearcherProfileCreationAction.java new file mode 100644 index 000000000000..495fe59cdc26 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/service/AfterResearcherProfileCreationAction.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile.service; + +import java.sql.SQLException; + +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.profile.ResearcherProfile; + +/** + * Interface to mark classes that allow to perform additional logic on created + * researcher profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface AfterResearcherProfileCreationAction { + + /** + * Perform some actions on the given researcher profile and returns the updated + * profile. 
+ * + * @param context the DSpace context + * @param researcherProfile the created researcher profile + * @param owner the EPerson that is owner of the given profile + * @throws SQLException if a SQL error occurs + */ + void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/profile/service/ResearcherProfileService.java b/dspace-api/src/main/java/org/dspace/profile/service/ResearcherProfileService.java new file mode 100644 index 000000000000..9e52402f77e4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/profile/service/ResearcherProfileService.java @@ -0,0 +1,112 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.profile.service; + +import java.net.URI; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.EPerson; +import org.dspace.profile.ResearcherProfile; + +/** + * Service interface class for the {@link ResearcherProfile} object. The + * implementation of this class is responsible for all business logic calls for + * the {@link ResearcherProfile} object. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface ResearcherProfileService { + + /** + * Find the ResearcherProfile by UUID. + * + * @param context the relevant DSpace Context. 
+ * @param id the ResearcherProfile id + * @return the found ResearcherProfile + * @throws SQLException + * @throws AuthorizeException + */ + public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException; + + /** + * Create a new researcher profile for the given ePerson. + * + * @param context the relevant DSpace Context. + * @param ePerson the ePerson + * @return the created profile + * @throws SQLException + * @throws AuthorizeException + * @throws SearchServiceException + */ + public ResearcherProfile createAndReturn(Context context, EPerson ePerson) + throws AuthorizeException, SQLException, SearchServiceException; + + /** + * Delete the profile with the given id. Based on the + * researcher-profile.hard-delete.enabled configuration, this method deletes the + * related item or removes the association between the researcher profile and + * eperson related to the input uuid. + * + * @param context the relevant DSpace Context. + * @param id the researcher profile id + * @throws AuthorizeException + * @throws SQLException + */ + public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException; + + /** + * Changes the visibility of the given profile using the given new visible + * value. The visiblity controls whether the Profile is Anonymous READ or not. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param visible the visible value to set. If true the profile will + * be visible to all users. + * @throws SQLException + * @throws AuthorizeException + */ + public void changeVisibility(Context context, ResearcherProfile profile, boolean visible) + throws AuthorizeException, SQLException; + + /** + * Claims and links an eperson to an existing DSpaceObject + * @param context the relevant DSpace Context. 
+ * @param ePerson the ePerson + * @param uri uri of existing Item to be linked to the + * eperson + * @return the created profile + * @throws IllegalArgumentException if the given uri is not related to an + * archived item or if the item cannot be + * claimed + */ + ResearcherProfile claim(Context context, EPerson ePerson, URI uri) + throws SQLException, AuthorizeException, SearchServiceException; + + /** + * Check if the given item has an entity type compatible with that of the + * researcher profile. If the given item does not have an entity type, the check + * returns false. + * + * @param item the item to check + * @return the check result + */ + boolean hasProfileType(Item item); + + /** + * Returns the profile entity type, if any. + * + * @return the profile type + */ + String getProfileType(); +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/QAEventsDeleteCascadeConsumer.java b/dspace-api/src/main/java/org/dspace/qaevent/QAEventsDeleteCascadeConsumer.java new file mode 100644 index 000000000000..6460c360ecbe --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/QAEventsDeleteCascadeConsumer.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.qaevent; + +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.event.Consumer; +import org.dspace.event.Event; +import org.dspace.qaevent.service.QAEventService; +import org.dspace.utils.DSpace; + +/** + * Consumer to delete qaevents from solr due to the target item deletion + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAEventsDeleteCascadeConsumer implements Consumer { + + private QAEventService qaEventService; + + @Override + public void initialize() throws Exception { + qaEventService = new 
DSpace().getSingletonService(QAEventService.class); + } + + @Override + public void finish(Context context) throws Exception { + + } + + @Override + public void consume(Context context, Event event) throws Exception { + if (event.getEventType() == Event.DELETE) { + if (event.getSubjectType() == Constants.ITEM && event.getSubjectID() != null) { + qaEventService.deleteEventsByTargetId(event.getSubjectID()); + } + } + } + + public void end(Context context) throws Exception { + } + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/QASource.java b/dspace-api/src/main/java/org/dspace/qaevent/QASource.java new file mode 100644 index 000000000000..e22f7d32a770 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/QASource.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent; + +import java.util.Date; + +/** + * This model class represent the source/provider of the QA events (as Openaire). 
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4Science)
+ *
+ */
+public class QASource {
+    private String name;
+    private long totalEvents;
+    private Date lastEvent;
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public long getTotalEvents() {
+        return totalEvents;
+    }
+
+    public void setTotalEvents(long totalEvents) {
+        this.totalEvents = totalEvents;
+    }
+
+    public Date getLastEvent() {
+        return lastEvent;
+    }
+
+    public void setLastEvent(Date lastEvent) {
+        this.lastEvent = lastEvent;
+    }
+}
diff --git a/dspace-api/src/main/java/org/dspace/qaevent/QATopic.java b/dspace-api/src/main/java/org/dspace/qaevent/QATopic.java
new file mode 100644
index 000000000000..63e523b9cb5e
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/qaevent/QATopic.java
@@ -0,0 +1,47 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.qaevent;
+
+import java.util.Date;
+
+/**
+ * This model class represents the quality assurance broker topic concept. A
+ * topic represents a type of event and is therefore used to group events.
+ * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QATopic { + private String key; + private long totalEvents; + private Date lastEvent; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public long getTotalEvents() { + return totalEvents; + } + + public void setTotalEvents(long totalEvents) { + this.totalEvents = totalEvents; + } + + public Date getLastEvent() { + return lastEvent; + } + + public void setLastEvent(Date lastEvent) { + this.lastEvent = lastEvent; + } +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/QualityAssuranceAction.java b/dspace-api/src/main/java/org/dspace/qaevent/QualityAssuranceAction.java new file mode 100644 index 000000000000..f2aebba799bd --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/QualityAssuranceAction.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.qaevent.service.dto.QAMessageDTO; + +/** + * Interface for classes that perform a correction on the given item. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public interface QualityAssuranceAction { + + /** + * Perform a correction on the given item. 
+     *
+     * @param context     the DSpace context
+     * @param item        the item to correct
+     * @param relatedItem the related item, if any
+     * @param message     the message with the correction details
+     */
+    public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message);
+}
diff --git a/dspace-api/src/main/java/org/dspace/qaevent/action/QAEntityOpenaireMetadataAction.java b/dspace-api/src/main/java/org/dspace/qaevent/action/QAEntityOpenaireMetadataAction.java
new file mode 100644
index 000000000000..f244418dd069
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/qaevent/action/QAEntityOpenaireMetadataAction.java
@@ -0,0 +1,180 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.qaevent.action;
+
+import java.sql.SQLException;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.content.Collection;
+import org.dspace.content.EntityType;
+import org.dspace.content.Item;
+import org.dspace.content.RelationshipType;
+import org.dspace.content.WorkspaceItem;
+import org.dspace.content.service.CollectionService;
+import org.dspace.content.service.EntityTypeService;
+import org.dspace.content.service.InstallItemService;
+import org.dspace.content.service.ItemService;
+import org.dspace.content.service.RelationshipService;
+import org.dspace.content.service.RelationshipTypeService;
+import org.dspace.content.service.WorkspaceItemService;
+import org.dspace.core.Context;
+import org.dspace.qaevent.QualityAssuranceAction;
+import org.dspace.qaevent.service.dto.OpenaireMessageDTO;
+import org.dspace.qaevent.service.dto.QAMessageDTO;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Implementation of {@link QualityAssuranceAction} that handles the
relationship between the + * item to correct and a related item. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAEntityOpenaireMetadataAction implements QualityAssuranceAction { + private String relation; + private String entityType; + private Map entityMetadata; + + @Autowired + private InstallItemService installItemService; + + @Autowired + private ItemService itemService; + + @Autowired + private EntityTypeService entityTypeService; + + @Autowired + private RelationshipService relationshipService; + + @Autowired + private RelationshipTypeService relationshipTypeService; + + @Autowired + private WorkspaceItemService workspaceItemService; + + @Autowired + private CollectionService collectionService; + + public void setItemService(ItemService itemService) { + this.itemService = itemService; + } + + public String getRelation() { + return relation; + } + + public void setRelation(String relation) { + this.relation = relation; + } + + public String[] splitMetadata(String metadata) { + String[] result = new String[3]; + String[] split = metadata.split("\\."); + result[0] = split[0]; + result[1] = split[1]; + if (split.length == 3) { + result[2] = split[2]; + } + return result; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public Map getEntityMetadata() { + return entityMetadata; + } + + public void setEntityMetadata(Map entityMetadata) { + this.entityMetadata = entityMetadata; + } + + @Override + public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message) { + try { + if (relatedItem != null) { + link(context, item, relatedItem); + } else { + + Collection collection = collectionService.retrieveCollectionWithSubmitByEntityType(context, + item, entityType); + if (collection == null) { + throw new IllegalStateException("No collection found by entity type: " + collection); + } + + 
WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, true); + relatedItem = workspaceItem.getItem(); + + for (String key : entityMetadata.keySet()) { + String value = getValue(message, key); + if (StringUtils.isNotBlank(value)) { + String[] targetMetadata = splitMetadata(entityMetadata.get(key)); + itemService.addMetadata(context, relatedItem, targetMetadata[0], targetMetadata[1], + targetMetadata[2], null, value); + } + } + installItemService.installItem(context, workspaceItem); + itemService.update(context, relatedItem); + link(context, item, relatedItem); + } + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + } + + /** + * Create a new relationship between the two given item, based on the configured + * relation. + */ + private void link(Context context, Item item, Item relatedItem) throws SQLException, AuthorizeException { + EntityType project = entityTypeService.findByEntityType(context, entityType); + RelationshipType relType = relationshipTypeService.findByEntityType(context, project).stream() + .filter(r -> StringUtils.equals(r.getRightwardType(), relation)).findFirst() + .orElseThrow(() -> new IllegalStateException("No relationshipType named " + relation + + " was found for the entity type " + entityType + + ". A proper configuration is required to use the QAEntitiyMetadataAction." 
+ + " If you don't manage funding in your repository please skip this topic in" + + " the qaevents.cfg")); + // Create the relationship + relationshipService.create(context, item, relatedItem, relType, -1, -1); + } + + private String getValue(QAMessageDTO message, String key) { + if (!(message instanceof OpenaireMessageDTO)) { + return null; + } + + OpenaireMessageDTO openaireMessage = (OpenaireMessageDTO) message; + + if (StringUtils.equals(key, "acronym")) { + return openaireMessage.getAcronym(); + } else if (StringUtils.equals(key, "code")) { + return openaireMessage.getCode(); + } else if (StringUtils.equals(key, "funder")) { + return openaireMessage.getFunder(); + } else if (StringUtils.equals(key, "fundingProgram")) { + return openaireMessage.getFundingProgram(); + } else if (StringUtils.equals(key, "jurisdiction")) { + return openaireMessage.getJurisdiction(); + } else if (StringUtils.equals(key, "openaireId")) { + return openaireMessage.getOpenaireId(); + } else if (StringUtils.equals(key, "title")) { + return openaireMessage.getTitle(); + } + + return null; + } +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/action/QAOpenaireMetadataMapAction.java b/dspace-api/src/main/java/org/dspace/qaevent/action/QAOpenaireMetadataMapAction.java new file mode 100644 index 000000000000..e1fa23002fcb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/action/QAOpenaireMetadataMapAction.java @@ -0,0 +1,86 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.action; + +import java.sql.SQLException; +import java.util.Map; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.qaevent.QualityAssuranceAction; +import 
org.dspace.qaevent.service.dto.OpenaireMessageDTO; +import org.dspace.qaevent.service.dto.QAMessageDTO; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link QualityAssuranceAction} that add a specific metadata on the given + * item based on the OPENAIRE message type. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAOpenaireMetadataMapAction implements QualityAssuranceAction { + public static final String DEFAULT = "default"; + + private Map types; + @Autowired + private ItemService itemService; + + public void setItemService(ItemService itemService) { + this.itemService = itemService; + } + + public Map getTypes() { + return types; + } + + public void setTypes(Map types) { + this.types = types; + } + + /** + * Apply the correction on one metadata field of the given item based on the + * openaire message type. + */ + @Override + public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message) { + + if (!(message instanceof OpenaireMessageDTO)) { + throw new IllegalArgumentException("Unsupported message type: " + message.getClass()); + } + + OpenaireMessageDTO openaireMessage = (OpenaireMessageDTO) message; + + try { + String targetMetadata = types.get(openaireMessage.getType()); + if (targetMetadata == null) { + targetMetadata = types.get(DEFAULT); + } + String[] metadata = splitMetadata(targetMetadata); + itemService.addMetadata(context, item, metadata[0], metadata[1], metadata[2], null, + openaireMessage.getValue()); + itemService.update(context, item); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + } + + public String[] splitMetadata(String metadata) { + String[] result = new String[3]; + String[] split = metadata.split("\\."); + result[0] = split[0]; + result[1] = split[1]; + if (split.length == 3) { + result[2] = split[2]; + } + return result; + } +} diff --git 
a/dspace-api/src/main/java/org/dspace/qaevent/action/QAOpenaireSimpleMetadataAction.java b/dspace-api/src/main/java/org/dspace/qaevent/action/QAOpenaireSimpleMetadataAction.java new file mode 100644 index 000000000000..2509b768aefb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/action/QAOpenaireSimpleMetadataAction.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.action; + +import java.sql.SQLException; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.qaevent.QualityAssuranceAction; +import org.dspace.qaevent.service.dto.OpenaireMessageDTO; +import org.dspace.qaevent.service.dto.QAMessageDTO; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link QualityAssuranceAction} that add a simple metadata to the given + * item. 
+ * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAOpenaireSimpleMetadataAction implements QualityAssuranceAction { + private String metadata; + private String metadataSchema; + private String metadataElement; + private String metadataQualifier; + @Autowired + private ItemService itemService; + + public void setItemService(ItemService itemService) { + this.itemService = itemService; + } + + public String getMetadata() { + return metadata; + } + + public void setMetadata(String metadata) { + this.metadata = metadata; + String[] split = metadata.split("\\."); + this.metadataSchema = split[0]; + this.metadataElement = split[1]; + if (split.length == 3) { + this.metadataQualifier = split[2]; + } + } + + @Override + public void applyCorrection(Context context, Item item, Item relatedItem, QAMessageDTO message) { + try { + itemService.addMetadata(context, item, metadataSchema, metadataElement, metadataQualifier, null, + ((OpenaireMessageDTO) message).getAbstracts()); + itemService.update(context, item); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/dao/QAEventsDAO.java b/dspace-api/src/main/java/org/dspace/qaevent/dao/QAEventsDAO.java new file mode 100644 index 000000000000..98c38ca3f5a9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/dao/QAEventsDAO.java @@ -0,0 +1,92 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.dao; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.content.QAEventProcessed; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; + +/** + * DAO that handle processed QA Events. 
+ * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public interface QAEventsDAO extends GenericDAO { + + /** + * Returns all the stored QAEventProcessed entities. + * + * @param context the DSpace context + * @return the found entities + * @throws SQLException if an SQL error occurs + */ + public List findAll(Context context) throws SQLException; + + /** + * Returns the stored QAEventProcessed entities by item. + * + * @param context the DSpace context + * @param item the item to search for + * @return the found entities + * @throws SQLException if an SQL error occurs + */ + public List findByItem(Context context, Item item) throws SQLException; + + /** + * Returns the stored QAEventProcessed entities by eperson. + * + * @param context the DSpace context + * @param ePerson the ePerson to search for + * @return the found entities + * @throws SQLException if an SQL error occurs + */ + public List findByEPerson(Context context, EPerson ePerson) throws SQLException; + + /** + * Search a page of quality assurance broker events by notification ID. + * + * @param context the DSpace context + * @param eventId the event id + * @param start the start index + * @param size the size to be applied + * @return the processed events + * @throws SQLException if an SQL error occurs + */ + public List searchByEventId(Context context, String eventId, Integer start, Integer size) + throws SQLException; + + /** + * Check if an event with the given checksum is already stored. + * + * @param context the DSpace context + * @param checksum the checksum to search for + * @return true if the given checksum is related to an already + * stored event, false otherwise + * @throws SQLException if an SQL error occurs + */ + public boolean isEventStored(Context context, String checksum) throws SQLException; + + /** + * Store an event related to the given checksum. 
+     *
+     * @param context  the DSpace context
+     * @param checksum the checksum of the event to be stored
+     * @param eperson  the eperson who handles the event
+     * @param item     the item related to the event
+     * @return true if the creation is completed with success, false
+     *         otherwise
+     */
+    boolean storeEvent(Context context, String checksum, EPerson eperson, Item item);
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/qaevent/dao/impl/QAEventsDAOImpl.java b/dspace-api/src/main/java/org/dspace/qaevent/dao/impl/QAEventsDAOImpl.java
new file mode 100644
index 000000000000..ac9b96045e42
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/qaevent/dao/impl/QAEventsDAOImpl.java
@@ -0,0 +1,90 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.qaevent.dao.impl;
+
+import java.sql.SQLException;
+import java.util.Date;
+import java.util.List;
+import javax.persistence.Query;
+
+import org.dspace.content.Item;
+import org.dspace.content.QAEventProcessed;
+import org.dspace.core.AbstractHibernateDAO;
+import org.dspace.core.Context;
+import org.dspace.eperson.EPerson;
+import org.dspace.qaevent.dao.QAEventsDAO;
+
+/**
+ * Implementation of {@link QAEventsDAO} that stores processed events using an
+ * SQL DBMS.
+ * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAEventsDAOImpl extends AbstractHibernateDAO implements QAEventsDAO { + + @Override + public List findAll(Context context) throws SQLException { + return findAll(context, QAEventProcessed.class); + } + + @Override + public boolean storeEvent(Context context, String checksum, EPerson eperson, Item item) { + QAEventProcessed qaEvent = new QAEventProcessed(); + qaEvent.setEperson(eperson); + qaEvent.setEventId(checksum); + qaEvent.setItem(item); + qaEvent.setEventTimestamp(new Date()); + try { + create(context, qaEvent); + return true; + } catch (SQLException e) { + return false; + } + } + + @Override + public boolean isEventStored(Context context, String checksum) throws SQLException { + Query query = createQuery(context, + "SELECT count(eventId) FROM QAEventProcessed qaevent WHERE qaevent.eventId = :event_id "); + query.setParameter("event_id", checksum); + return count(query) != 0; + } + + @Override + public List searchByEventId(Context context, String eventId, Integer start, Integer size) + throws SQLException { + Query query = createQuery(context, + "SELECT * FROM QAEventProcessed qaevent WHERE qaevent.qaevent_id = :event_id "); + query.setFirstResult(start); + query.setMaxResults(size); + query.setParameter("event_id", eventId); + return findMany(context, query); + } + + @Override + public List findByItem(Context context, Item item) throws SQLException { + Query query = createQuery(context, "" + + " SELECT qaevent " + + " FROM QAEventProcessed qaevent " + + " WHERE qaevent.item = :item "); + query.setParameter("item", item); + return findMany(context, query); + } + + @Override + public List findByEPerson(Context context, EPerson ePerson) throws SQLException { + Query query = createQuery(context, "" + + " SELECT qaevent " + + " FROM QAEventProcessed qaevent " + + " WHERE qaevent.eperson = :eperson "); + query.setParameter("eperson", ePerson); + return findMany(context, query); + 
} + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImport.java b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImport.java new file mode 100644 index 000000000000..9087606aa6e5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImport.java @@ -0,0 +1,314 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.script; + + +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.substringAfter; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; +import eu.dnetlib.broker.BrokerClient; +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.dspace.content.QAEvent; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.qaevent.service.OpenaireClientFactory; +import org.dspace.qaevent.service.QAEventService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link DSpaceRunnable} to perfom a QAEvents import from a + * json file. 
The JSON file contains an array of JSON Events, where each event + * has the following structure. The message attribute follows the structure + * documented at + * @see see + * + *
    + * {
    + * "originalId": "oai:www.openstarts.units.it:10077/21838",
    + * "title": "Egypt, crossroad of translations and literary interweavings",
    + * "topic": "ENRICH/MORE/PROJECT",
    + * "trust": 1.0,
    + * "message": {
    + * "projects[0].acronym": "PAThs",
    + * "projects[0].code": "687567",
    + * "projects[0].funder": "EC",
    + * "projects[0].fundingProgram": "H2020",
    + * "projects[0].jurisdiction": "EU",
    + * "projects[0].openaireId": "40|corda__h2020::6e32f5eb912688f2424c68b851483ea4",
    + * "projects[0].title": "Tracking Papyrus and Parchment Paths"
    + * }
    + * } + *
    + * + * @author Alessandro Martelli (alessandro.martelli at 4science.it) + * @author Luca Giamminonni (luca.giamminonni at 4Science.it) + * + */ +public class OpenaireEventsImport + extends DSpaceRunnable> { + + private QAEventService qaEventService; + + private String[] topicsToImport; + + private ConfigurationService configurationService; + + private BrokerClient brokerClient; + + private ObjectMapper jsonMapper; + + private URL openaireBrokerURL; + + private String fileLocation; + + private String email; + + private Context context; + + @Override + @SuppressWarnings({ "rawtypes" }) + public OpenaireEventsImportScriptConfiguration getScriptConfiguration() { + OpenaireEventsImportScriptConfiguration configuration = new DSpace().getServiceManager() + .getServiceByName("import-openaire-events", OpenaireEventsImportScriptConfiguration.class); + return configuration; + } + + @Override + public void setup() throws ParseException { + + jsonMapper = new JsonMapper(); + jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + qaEventService = new DSpace().getSingletonService(QAEventService.class); + configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + brokerClient = OpenaireClientFactory.getInstance().getBrokerClient(); + + topicsToImport = configurationService.getArrayProperty("qaevents.openaire.import.topic"); + openaireBrokerURL = getOpenaireBrokerUri(); + + fileLocation = commandLine.getOptionValue("f"); + email = commandLine.getOptionValue("e"); + + } + + @Override + public void internalRun() throws Exception { + + if (StringUtils.isAllBlank(fileLocation, email)) { + throw new IllegalArgumentException("One parameter between the location of the file and the email " + + "must be entered to proceed with the import."); + } + + if (StringUtils.isNoneBlank(fileLocation, email)) { + throw new IllegalArgumentException("Only one parameter between the location of the file and the email " + + "must be entered to 
proceed with the import."); + } + + context = new Context(); + assignCurrentUserInContext(); + + try { + importOpenaireEvents(); + } catch (Exception ex) { + handler.logError("A not recoverable error occurs during OPENAIRE events import: " + getMessage(ex), ex); + throw ex; + } + + } + + /** + * Read the OPENAIRE events from the given JSON file or directly from the + * OPENAIRE broker and try to store them. + */ + private void importOpenaireEvents() throws Exception { + + if (StringUtils.isNotBlank(fileLocation)) { + handler.logInfo("Trying to read the QA events from the provided file"); + importOpenaireEventsFromFile(); + } else { + handler.logInfo("Trying to read the QA events from the OPENAIRE broker"); + importOpenaireEventsFromBroker(); + } + + } + + /** + * Read the OPENAIRE events from the given file location and try to store them. + */ + private void importOpenaireEventsFromFile() throws Exception { + + InputStream eventsFileInputStream = getQAEventsFileInputStream(); + List qaEvents = readOpenaireQAEventsFromJson(eventsFileInputStream); + + handler.logInfo("Found " + qaEvents.size() + " events in the given file"); + + storeOpenaireQAEvents(qaEvents); + + } + + /** + * Import the OPENAIRE events from the Broker using the subscription related to + * the given email and try to store them. + */ + private void importOpenaireEventsFromBroker() { + + List subscriptionIds = listEmailSubscriptions(); + + handler.logInfo("Found " + subscriptionIds.size() + " subscriptions related to the given email"); + + for (String subscriptionId : subscriptionIds) { + + List events = readOpenaireQAEventsFromBroker(subscriptionId); + + handler.logInfo("Found " + events.size() + " events from the subscription " + subscriptionId); + + storeOpenaireQAEvents(events); + + } + } + + /** + * Obtain an InputStream from the runnable instance. 
+ */ + private InputStream getQAEventsFileInputStream() throws Exception { + return handler.getFileStream(context, fileLocation) + .orElseThrow(() -> new IllegalArgumentException("Error reading file, the file couldn't be " + + "found for filename: " + fileLocation)); + } + + /** + * Read all the QAEvent from the OPENAIRE Broker related to the subscription + * with the given id. + */ + private List readOpenaireQAEventsFromBroker(String subscriptionId) { + + try { + InputStream eventsInputStream = getEventsBySubscriptions(subscriptionId); + return readOpenaireQAEventsFromJson(eventsInputStream); + } catch (Exception ex) { + handler.logError("An error occurs downloading the events related to the subscription " + + subscriptionId + ": " + getMessage(ex), ex); + } + + return List.of(); + + } + + /** + * Read all the QAEvent present in the given input stream. + * + * @return the QA events to be imported + */ + private List readOpenaireQAEventsFromJson(InputStream inputStream) throws Exception { + return jsonMapper.readValue(inputStream, new TypeReference>() { + }); + } + + /** + * Store the given QAEvents. + * + * @param events the event to be stored + */ + private void storeOpenaireQAEvents(List events) { + for (QAEvent event : events) { + try { + storeOpenaireQAEvent(event); + } catch (RuntimeException e) { + handler.logWarning("An error occurs storing the event with id " + + event.getEventId() + ": " + getMessage(e)); + } + } + } + + /** + * Store the given QAEvent, skipping it if it is not supported. + * + * @param event the event to be stored + */ + private void storeOpenaireQAEvent(QAEvent event) { + + if (!StringUtils.equalsAny(event.getTopic(), topicsToImport)) { + handler.logWarning("Event for topic " + event.getTopic() + " is not allowed in the qaevents.cfg"); + return; + } + + event.setSource(QAEvent.OPENAIRE_SOURCE); + + qaEventService.store(context, event); + + } + + /** + * Download the events related to the given subscription from the OPENAIRE broker. 
+ * + * @param subscriptionId the subscription id + * @return an input stream from which to read the events in json format + */ + private InputStream getEventsBySubscriptions(String subscriptionId) throws Exception { + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + brokerClient.downloadEvents(openaireBrokerURL, subscriptionId, outputStream); + return new ByteArrayInputStream(outputStream.toByteArray()); + } + + /** + * Takes all the subscription related to the given email from the OPENAIRE + * broker. + */ + private List listEmailSubscriptions() { + try { + return brokerClient.listSubscriptions(openaireBrokerURL, email); + } catch (Exception ex) { + throw new IllegalArgumentException("An error occurs retriving the subscriptions " + + "from the OPENAIRE broker: " + getMessage(ex), ex); + } + } + + private URL getOpenaireBrokerUri() { + try { + return new URL(configurationService.getProperty("qaevents.openaire.broker-url", "http://api.openaire.eu/broker")); + } catch (MalformedURLException e) { + throw new IllegalStateException("The configured OPENAIRE broker URL is not valid.", e); + } + } + + /** + * Get the root exception message from the given exception. + */ + private String getMessage(Exception ex) { + String message = ExceptionUtils.getRootCauseMessage(ex); + // Remove the Exception name from the message + return isNotBlank(message) ? 
substringAfter(message, ":").trim() : ""; + } + + private void assignCurrentUserInContext() throws SQLException { + UUID uuid = getEpersonIdentifier(); + if (uuid != null) { + EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); + context.setCurrentUser(ePerson); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportCli.java b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportCli.java new file mode 100644 index 000000000000..d98b578cdd38 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportCli.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.script; + +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.ParseException; +import org.dspace.utils.DSpace; + +/** + * Extensions of {@link OpenaireEventsImport} to run the script on console. 
+ * + * @author Alessandro Martelli (alessandro.martelli at 4science.it) + * + */ +public class OpenaireEventsImportCli extends OpenaireEventsImport { + + @Override + @SuppressWarnings({ "rawtypes" }) + public OpenaireEventsImportCliScriptConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager() + .getServiceByName("import-openaire-events", OpenaireEventsImportCliScriptConfiguration.class); + } + + @Override + public void setup() throws ParseException { + super.setup(); + + // in case of CLI we show the help prompt + if (commandLine.hasOption('h')) { + HelpFormatter formatter = new HelpFormatter(); + formatter.printHelp("Import Notification event json file", getScriptConfiguration().getOptions()); + System.exit(0); + } + + } + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportCliScriptConfiguration.java new file mode 100644 index 000000000000..5be0453a17f4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportCliScriptConfiguration.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.script; + +import org.apache.commons.cli.Options; + +/** + * Extension of {@link OpenaireEventsImportScriptConfiguration} to run the script on + * console. 
+ * + * @author Alessandro Martelli (alessandro.martelli at 4science.it) + * + */ +public class OpenaireEventsImportCliScriptConfiguration + extends OpenaireEventsImportScriptConfiguration { + + @Override + public Options getOptions() { + Options options = super.getOptions(); + options.addOption("h", "help", false, "help"); + options.getOption("h").setType(boolean.class); + super.options = options; + return options; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportScriptConfiguration.java new file mode 100644 index 000000000000..60001e73507d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/script/OpenaireEventsImportScriptConfiguration.java @@ -0,0 +1,72 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.script; + +import java.io.InputStream; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * Extension of {@link ScriptConfiguration} to perform a QAEvents import from + * file. 
+ * + * @author Alessandro Martelli (alessandro.martelli at 4science.it) + * + */ +public class OpenaireEventsImportScriptConfiguration extends ScriptConfiguration { + + /* + private AuthorizeService authorizeService; + */ + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + /** + * Generic setter for the dspaceRunnableClass + * @param dspaceRunnableClass The dspaceRunnableClass to be set on this OpenaireEventsImportScriptConfiguration + */ + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } +/* + @Override + public boolean isAllowedToExecute(Context context) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } +*/ + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "file", true, "Import data from Openaire quality assurance broker JSON file." + + " This parameter is mutually exclusive to the email parameter."); + options.getOption("f").setType(InputStream.class); + options.getOption("f").setRequired(false); + + options.addOption("e", "email", true, "Email related to the subscriptions to import data from Openaire " + + "broker. 
This parameter is mutually exclusive to the file parameter."); + options.getOption("e").setType(String.class); + options.getOption("e").setRequired(false); + + super.options = options; + } + return options; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/OpenaireClientFactory.java b/dspace-api/src/main/java/org/dspace/qaevent/service/OpenaireClientFactory.java new file mode 100644 index 000000000000..e7a7be33c1b0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/OpenaireClientFactory.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service; + +import eu.dnetlib.broker.BrokerClient; +import org.dspace.utils.DSpace; + +/** + * Factory for the {@link BrokerClient}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface OpenaireClientFactory { + + /** + * Returns an instance of the {@link BrokerClient}. 
+ * + * @return the client instance + */ + public BrokerClient getBrokerClient(); + + public static OpenaireClientFactory getInstance() { + return new DSpace().getServiceManager().getServiceByName("openaireClientFactory", OpenaireClientFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/QAEventActionService.java b/dspace-api/src/main/java/org/dspace/qaevent/service/QAEventActionService.java new file mode 100644 index 000000000000..2e5690f6225b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/QAEventActionService.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service; + +import org.dspace.content.QAEvent; +import org.dspace.core.Context; + +/** + * Service that handles the actions that can be done related to an + * {@link QAEvent}. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public interface QAEventActionService { + + /** + * Accept the given event. + * + * @param context the DSpace context + * @param qaevent the event to be accepted + */ + public void accept(Context context, QAEvent qaevent); + + /** + * Discard the given event. + * + * @param context the DSpace context + * @param qaevent the event to be discarded + */ + public void discard(Context context, QAEvent qaevent); + + /** + * Reject the given event. 
+ * + * @param context the DSpace context + * @param qaevent the event to be rejected + */ + public void reject(Context context, QAEvent qaevent); +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/QAEventService.java b/dspace-api/src/main/java/org/dspace/qaevent/service/QAEventService.java new file mode 100644 index 000000000000..2332a55caf52 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/QAEventService.java @@ -0,0 +1,158 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service; + +import java.util.List; +import java.util.UUID; + +import org.dspace.content.QAEvent; +import org.dspace.core.Context; +import org.dspace.qaevent.QASource; +import org.dspace.qaevent.QATopic; + +/** + * Service that handles {@link QAEvent}. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public interface QAEventService { + + /** + * Find all the event's topics. + * + * @param offset the offset to apply + * @return the topics list + */ + public List findAllTopics(long offset, long count, String orderField, boolean ascending); + /** + * Find all the event's topics related to the given source. + * + * @param source the source to search for + * @param offset the offset to apply + * @param count the page size + * @return the topics list + */ + public List findAllTopicsBySource(String source, long offset, long count, + String orderField, boolean ascending); + + /** + * Count all the event's topics. + * + * @return the count result + */ + public long countTopics(); + + /** + * Count all the event's topics related to the given source. + * + * @param source the source to search for + * @return the count result + */ + public long countTopicsBySource(String source); + + /** + * Find all the events by topic. 
+ * + * @param topic the topic to search for + * @param offset the offset to apply + * @param pageSize the page size + * @param orderField the field to order for + * @param ascending true if the order should be ascending, false otherwise + * @return the events + */ + public List findEventsByTopicAndPage(String topic, long offset, int pageSize, + String orderField, boolean ascending); + + /** + * Find all the events by topic. + * + * @param topic the topic to search for + * @return the events + */ + public List findEventsByTopic(String topic); + + /** + * Count all the events by topic. + * + * @param topic the topic to search for + * @return the events count + */ + public long countEventsByTopic(String topic); + + /** + * Find an event by the given id. + * + * @param id the id of the event to search for + * @return the event + */ + public QAEvent findEventByEventId(String id); + + /** + * Store the given event. + * + * @param context the DSpace context + * @param event the event to store + */ + public void store(Context context, QAEvent event); + + /** + * Delete an event by the given id. + * + * @param id the id of the event to delete + */ + public void deleteEventByEventId(String id); + + /** + * Delete events by the given target id. + * + * @param targetId the id of the target + */ + public void deleteEventsByTargetId(UUID targetId); + + /** + * Find a specific topic by the given id. + * + * @param topicId the topic id to search for + * @return the topic + */ + public QATopic findTopicByTopicId(String topicId); + + /** + * Find a specific source by the given name. + * + * @param source the source name + * @return the source + */ + public QASource findSource(String source); + + /** + * Find all the event's sources. + * + * @param offset the offset to apply + * @param pageSize the page size + * @return the sources list + */ + public List findAllSources(long offset, int pageSize); + + /** + * Count all the event's sources. 
+ * + * @return the count result + */ + public long countSources(); + + /** + * Check if the given QA event supports a related item. + * + * @param qaevent the event to be verified + * @return true if the event supports a related item, false otherwise. + */ + public boolean isRelatedItemSupported(QAEvent qaevent); + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/dto/OpenaireMessageDTO.java b/dspace-api/src/main/java/org/dspace/qaevent/service/dto/OpenaireMessageDTO.java new file mode 100644 index 000000000000..59b4acf9db21 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/dto/OpenaireMessageDTO.java @@ -0,0 +1,173 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Implementation of {@link QAMessageDTO} that models messages coming from OPENAIRE. 
+ * @see see + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OpenaireMessageDTO implements QAMessageDTO { + + @JsonProperty("pids[0].value") + private String value; + + @JsonProperty("pids[0].type") + private String type; + + @JsonProperty("instances[0].hostedby") + private String instanceHostedBy; + + @JsonProperty("instances[0].instancetype") + private String instanceInstanceType; + + @JsonProperty("instances[0].license") + private String instanceLicense; + + @JsonProperty("instances[0].url") + private String instanceUrl; + + @JsonProperty("abstracts[0]") + private String abstracts; + + @JsonProperty("projects[0].acronym") + private String acronym; + + @JsonProperty("projects[0].code") + private String code; + + @JsonProperty("projects[0].funder") + private String funder; + + @JsonProperty("projects[0].fundingProgram") + private String fundingProgram; + + @JsonProperty("projects[0].jurisdiction") + private String jurisdiction; + + @JsonProperty("projects[0].openaireId") + private String openaireId; + + @JsonProperty("projects[0].title") + private String title; + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getInstanceHostedBy() { + return instanceHostedBy; + } + + public void setInstanceHostedBy(String instanceHostedBy) { + this.instanceHostedBy = instanceHostedBy; + } + + public String getInstanceInstanceType() { + return instanceInstanceType; + } + + public void setInstanceInstanceType(String instanceInstanceType) { + this.instanceInstanceType = instanceInstanceType; + } + + public String getInstanceLicense() { + return instanceLicense; + } + + public void setInstanceLicense(String instanceLicense) { + this.instanceLicense = instanceLicense; + } + + public String getInstanceUrl() { + return instanceUrl; + } + + public void 
setInstanceUrl(String instanceUrl) { + this.instanceUrl = instanceUrl; + } + + public String getAbstracts() { + return abstracts; + } + + public void setAbstracts(String abstracts) { + this.abstracts = abstracts; + } + + public String getAcronym() { + return acronym; + } + + public void setAcronym(String acronym) { + this.acronym = acronym; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getFunder() { + return funder; + } + + public void setFunder(String funder) { + this.funder = funder; + } + + public String getFundingProgram() { + return fundingProgram; + } + + public void setFundingProgram(String fundingProgram) { + this.fundingProgram = fundingProgram; + } + + public String getJurisdiction() { + return jurisdiction; + } + + public void setJurisdiction(String jurisdiction) { + this.jurisdiction = jurisdiction; + } + + public String getOpenaireId() { + return openaireId; + } + + public void setOpenaireId(String openaireId) { + this.openaireId = openaireId; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/dto/QAMessageDTO.java b/dspace-api/src/main/java/org/dspace/qaevent/service/dto/QAMessageDTO.java new file mode 100644 index 000000000000..2a63f42e615c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/dto/QAMessageDTO.java @@ -0,0 +1,21 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service.dto; + +import org.dspace.content.QAEvent; + +/** + * Interface for classes that contains the details related to a {@link QAEvent}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface QAMessageDTO { + + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/impl/OpenaireClientFactoryImpl.java b/dspace-api/src/main/java/org/dspace/qaevent/service/impl/OpenaireClientFactoryImpl.java new file mode 100644 index 000000000000..5839f5e8776f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/impl/OpenaireClientFactoryImpl.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service.impl; + +import eu.dnetlib.broker.BrokerClient; +import org.dspace.qaevent.service.OpenaireClientFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link OpenaireClientFactory} that returns the instance of + * {@link BrokerClient} managed by the Spring context. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OpenaireClientFactoryImpl implements OpenaireClientFactory { + + @Autowired + private BrokerClient brokerClient; + + @Override + public BrokerClient getBrokerClient() { + return brokerClient; + } + + public void setBrokerClient(BrokerClient brokerClient) { + this.brokerClient = brokerClient; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/impl/QAEventActionServiceImpl.java b/dspace-api/src/main/java/org/dspace/qaevent/service/impl/QAEventActionServiceImpl.java new file mode 100644 index 000000000000..cca70ecd0430 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/impl/QAEventActionServiceImpl.java @@ -0,0 +1,127 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service.impl; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Map; +import java.util.UUID; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.content.QAEvent; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.qaevent.QualityAssuranceAction; +import 
org.dspace.qaevent.service.QAEventActionService; +import org.dspace.qaevent.service.QAEventService; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link QAEventActionService}. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAEventActionServiceImpl implements QAEventActionService { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(QAEventActionServiceImpl.class); + + private ObjectMapper jsonMapper; + + @Autowired + private QAEventService qaEventService; + + @Autowired + private ItemService itemService; + + @Autowired + private ConfigurationService configurationService; + + private Map topicsToActions; + + public void setTopicsToActions(Map topicsToActions) { + this.topicsToActions = topicsToActions; + } + + public Map getTopicsToActions() { + return topicsToActions; + } + + public QAEventActionServiceImpl() { + jsonMapper = new JsonMapper(); + jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + + @Override + public void accept(Context context, QAEvent qaevent) { + Item item = null; + Item related = null; + try { + item = itemService.find(context, UUID.fromString(qaevent.getTarget())); + if (qaevent.getRelated() != null) { + related = itemService.find(context, UUID.fromString(qaevent.getRelated())); + } + topicsToActions.get(qaevent.getTopic()).applyCorrection(context, item, related, + jsonMapper.readValue(qaevent.getMessage(), qaevent.getMessageDtoClass())); + qaEventService.deleteEventByEventId(qaevent.getEventId()); + makeAcknowledgement(qaevent.getEventId(), qaevent.getSource(), QAEvent.ACCEPTED); + } catch (SQLException | JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + @Override + public void discard(Context context, QAEvent qaevent) { + qaEventService.deleteEventByEventId(qaevent.getEventId()); + 
makeAcknowledgement(qaevent.getEventId(), qaevent.getSource(), QAEvent.DISCARDED); + } + + @Override + public void reject(Context context, QAEvent qaevent) { + qaEventService.deleteEventByEventId(qaevent.getEventId()); + makeAcknowledgement(qaevent.getEventId(), qaevent.getSource(), QAEvent.REJECTED); + } + + /** + * Make acknowledgement to the configured urls for the event status. + */ + private void makeAcknowledgement(String eventId, String source, String status) { + String[] ackwnoledgeCallbacks = configurationService + .getArrayProperty("qaevents." + source + ".acknowledge-url"); + if (ackwnoledgeCallbacks != null) { + for (String ackwnoledgeCallback : ackwnoledgeCallbacks) { + if (StringUtils.isNotBlank(ackwnoledgeCallback)) { + ObjectNode node = jsonMapper.createObjectNode(); + node.put("eventId", eventId); + node.put("status", status); + StringEntity requestEntity = new StringEntity(node.toString(), ContentType.APPLICATION_JSON); + CloseableHttpClient httpclient = HttpClients.createDefault(); + HttpPost postMethod = new HttpPost(ackwnoledgeCallback); + postMethod.setEntity(requestEntity); + try { + httpclient.execute(postMethod); + } catch (IOException e) { + log.error(e.getMessage(), e); + } + } + } + } + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/qaevent/service/impl/QAEventServiceImpl.java b/dspace-api/src/main/java/org/dspace/qaevent/service/impl/QAEventServiceImpl.java new file mode 100644 index 000000000000..1dfcc1b6d96a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/qaevent/service/impl/QAEventServiceImpl.java @@ -0,0 +1,467 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.service.impl; + +import static java.util.Comparator.comparing; +import static org.apache.commons.lang3.StringUtils.endsWith; 
+import static org.dspace.content.QAEvent.OPENAIRE_SOURCE; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; +import org.apache.commons.lang3.ArrayUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrQuery.ORDER; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.request.UpdateRequest; +import org.apache.solr.client.solrj.response.FacetField; +import org.apache.solr.client.solrj.response.FacetField.Count; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.params.FacetParams; +import org.dspace.content.Item; +import org.dspace.content.QAEvent; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.handle.service.HandleService; +import org.dspace.qaevent.QASource; +import org.dspace.qaevent.QATopic; +import org.dspace.qaevent.dao.QAEventsDAO; +import org.dspace.qaevent.dao.impl.QAEventsDAOImpl; +import org.dspace.qaevent.service.QAEventService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link QAEventService} that uses Solr to store events. 
When + * the user performs an action on the event (such as accepting the suggestion or + * rejecting it) then the event is removed from solr and saved in the database + * (see {@link QAEventsDAO}) so that it is no longer proposed. + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class QAEventServiceImpl implements QAEventService { + + @Autowired(required = true) + protected ConfigurationService configurationService; + + @Autowired(required = true) + protected ItemService itemService; + + @Autowired + private HandleService handleService; + + @Autowired + private QAEventsDAOImpl qaEventsDao; + + private ObjectMapper jsonMapper; + + public QAEventServiceImpl() { + jsonMapper = new JsonMapper(); + jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + } + + /** + * Non-Static CommonsHttpSolrServer for processing indexing events. + */ + protected SolrClient solr = null; + + public static final String SOURCE = "source"; + public static final String ORIGINAL_ID = "original_id"; + public static final String TITLE = "title"; + public static final String TOPIC = "topic"; + public static final String TRUST = "trust"; + public static final String MESSAGE = "message"; + public static final String EVENT_ID = "event_id"; + public static final String RESOURCE_UUID = "resource_uuid"; + public static final String LAST_UPDATE = "last_update"; + public static final String RELATED_UUID = "related_uuid"; + + protected SolrClient getSolr() { + if (solr == null) { + String solrService = DSpaceServicesFactory.getInstance().getConfigurationService() + .getProperty("qaevents.solr.server", "http://localhost:8983/solr/qaevent"); + return new HttpSolrClient.Builder(solrService).build(); + } + return solr; + } + + @Override + public long countTopics() { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setRows(0); + solrQuery.setQuery("*:*"); + solrQuery.setFacet(true); + solrQuery.setFacetMinCount(1); + solrQuery.addFacetField(TOPIC); 
+ QueryResponse response; + try { + response = getSolr().query(solrQuery); + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + return response.getFacetField(TOPIC).getValueCount(); + } + + @Override + public long countTopicsBySource(String source) { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setRows(0); + solrQuery.setQuery("*:*"); + solrQuery.setFacet(true); + solrQuery.setFacetMinCount(1); + solrQuery.addFacetField(TOPIC); + solrQuery.addFilterQuery("source:" + source); + QueryResponse response; + try { + response = getSolr().query(solrQuery); + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + return response.getFacetField(TOPIC).getValueCount(); + } + + @Override + public void deleteEventByEventId(String id) { + try { + getSolr().deleteById(id); + getSolr().commit(); + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public void deleteEventsByTargetId(UUID targetId) { + try { + getSolr().deleteByQuery(RESOURCE_UUID + ":" + targetId.toString()); + getSolr().commit(); + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public QATopic findTopicByTopicId(String topicId) { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setRows(0); + solrQuery.setQuery(TOPIC + ":" + topicId.replaceAll("!", "/")); + solrQuery.setFacet(true); + solrQuery.setFacetMinCount(1); + solrQuery.addFacetField(TOPIC); + QueryResponse response; + try { + response = getSolr().query(solrQuery); + FacetField facetField = response.getFacetField(TOPIC); + for (Count c : facetField.getValues()) { + if (c.getName().equals(topicId.replace("!", "/"))) { + QATopic topic = new QATopic(); + topic.setKey(c.getName()); + topic.setTotalEvents(c.getCount()); + topic.setLastEvent(new Date()); + return topic; + } + } + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + return null; 
+ } + + @Override + public List findAllTopics(long offset, long count, String orderField, boolean ascending) { + return findAllTopicsBySource(null, offset, count, orderField, ascending); + } + + @Override + public List findAllTopicsBySource(String source, long offset, long count, + String orderField, boolean ascending) { + + if (source != null && isNotSupportedSource(source)) { + return null; + } + + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setRows(0); + solrQuery.setSort(orderField, ascending ? ORDER.asc : ORDER.desc); + solrQuery.setFacetSort(FacetParams.FACET_SORT_INDEX); + solrQuery.setQuery("*:*"); + solrQuery.setFacet(true); + solrQuery.setFacetMinCount(1); + solrQuery.setFacetLimit((int) (offset + count)); + solrQuery.addFacetField(TOPIC); + if (source != null) { + solrQuery.addFilterQuery(SOURCE + ":" + source); + } + QueryResponse response; + List topics = new ArrayList<>(); + try { + response = getSolr().query(solrQuery); + FacetField facetField = response.getFacetField(TOPIC); + topics = new ArrayList<>(); + int idx = 0; + for (Count c : facetField.getValues()) { + if (idx < offset) { + idx++; + continue; + } + QATopic topic = new QATopic(); + topic.setKey(c.getName()); + topic.setTotalEvents(c.getCount()); + topic.setLastEvent(new Date()); + topics.add(topic); + idx++; + } + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + return topics; + } + + @Override + public void store(Context context, QAEvent dto) { + + if (isNotSupportedSource(dto.getSource())) { + throw new IllegalArgumentException("The source of the given event is not supported: " + dto.getSource()); + } + + if (StringUtils.isBlank(dto.getTopic())) { + throw new IllegalArgumentException("A topic is mandatory for an event"); + } + + String checksum = dto.getEventId(); + try { + if (!qaEventsDao.isEventStored(context, checksum)) { + + SolrInputDocument doc = createSolrDocument(context, dto, checksum); + + UpdateRequest updateRequest = new 
UpdateRequest(); + + updateRequest.add(doc); + updateRequest.process(getSolr()); + + getSolr().commit(); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public QAEvent findEventByEventId(String eventId) { + SolrQuery param = new SolrQuery(EVENT_ID + ":" + eventId); + QueryResponse response; + try { + response = getSolr().query(param); + if (response != null) { + SolrDocumentList list = response.getResults(); + if (list != null && list.size() == 1) { + SolrDocument doc = list.get(0); + return getQAEventFromSOLR(doc); + } + } + } catch (SolrServerException | IOException e) { + throw new RuntimeException("Exception querying Solr", e); + } + return null; + } + + @Override + public List findEventsByTopicAndPage(String topic, long offset, + int pageSize, String orderField, boolean ascending) { + + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setStart(((Long) offset).intValue()); + if (pageSize != -1) { + solrQuery.setRows(pageSize); + } + solrQuery.setSort(orderField, ascending ? 
ORDER.asc : ORDER.desc); + solrQuery.setQuery(TOPIC + ":" + topic.replaceAll("!", "/")); + + QueryResponse response; + try { + response = getSolr().query(solrQuery); + if (response != null) { + SolrDocumentList list = response.getResults(); + List responseItem = new ArrayList<>(); + for (SolrDocument doc : list) { + QAEvent item = getQAEventFromSOLR(doc); + responseItem.add(item); + } + return responseItem; + } + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + + return List.of(); + } + + @Override + public List findEventsByTopic(String topic) { + return findEventsByTopicAndPage(topic, 0, -1, TRUST, false); + } + + @Override + public long countEventsByTopic(String topic) { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setRows(0); + solrQuery.setQuery(TOPIC + ":" + topic.replace("!", "/")); + QueryResponse response = null; + try { + response = getSolr().query(solrQuery); + return response.getResults().getNumFound(); + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public QASource findSource(String sourceName) { + + if (isNotSupportedSource(sourceName)) { + return null; + } + + SolrQuery solrQuery = new SolrQuery("*:*"); + solrQuery.setRows(0); + solrQuery.addFilterQuery(SOURCE + ":" + sourceName); + solrQuery.setFacet(true); + solrQuery.setFacetMinCount(1); + solrQuery.addFacetField(SOURCE); + + QueryResponse response; + try { + response = getSolr().query(solrQuery); + FacetField facetField = response.getFacetField(SOURCE); + for (Count c : facetField.getValues()) { + if (c.getName().equalsIgnoreCase(sourceName)) { + QASource source = new QASource(); + source.setName(c.getName()); + source.setTotalEvents(c.getCount()); + source.setLastEvent(new Date()); + return source; + } + } + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + + QASource source = new QASource(); + source.setName(sourceName); + source.setTotalEvents(0L); + + 
return source; + } + + @Override + public List findAllSources(long offset, int pageSize) { + return Arrays.stream(getSupportedSources()) + .map((sourceName) -> findSource(sourceName)) + .sorted(comparing(QASource::getTotalEvents).reversed()) + .skip(offset) + .limit(pageSize) + .collect(Collectors.toList()); + } + + @Override + public long countSources() { + return getSupportedSources().length; + } + + @Override + public boolean isRelatedItemSupported(QAEvent qaevent) { + // Currently only PROJECT topics related to OPENAIRE supports related items + return qaevent.getSource().equals(OPENAIRE_SOURCE) && endsWith(qaevent.getTopic(), "/PROJECT"); + } + + private SolrInputDocument createSolrDocument(Context context, QAEvent dto, String checksum) throws Exception { + SolrInputDocument doc = new SolrInputDocument(); + doc.addField(SOURCE, dto.getSource()); + doc.addField(EVENT_ID, checksum); + doc.addField(ORIGINAL_ID, dto.getOriginalId()); + doc.addField(TITLE, dto.getTitle()); + doc.addField(TOPIC, dto.getTopic()); + doc.addField(TRUST, dto.getTrust()); + doc.addField(MESSAGE, dto.getMessage()); + doc.addField(LAST_UPDATE, new Date()); + final String resourceUUID = getResourceUUID(context, dto.getOriginalId()); + if (resourceUUID == null) { + throw new IllegalArgumentException("Skipped event " + checksum + + " related to the oai record " + dto.getOriginalId() + " as the record was not found"); + } + doc.addField(RESOURCE_UUID, resourceUUID); + doc.addField(RELATED_UUID, dto.getRelated()); + return doc; + } + + private String getResourceUUID(Context context, String originalId) throws Exception { + String id = getHandleFromOriginalId(originalId); + if (id != null) { + Item item = (Item) handleService.resolveToObject(context, id); + if (item != null) { + final String itemUuid = item.getID().toString(); + context.uncacheEntity(item); + return itemUuid; + } else { + return null; + } + } else { + throw new IllegalArgumentException("Malformed originalId " + originalId); + } + 
} + + // oai:www.openstarts.units.it:10077/21486 + private String getHandleFromOriginalId(String originalId) { + int startPosition = originalId.lastIndexOf(':'); + if (startPosition != -1) { + return originalId.substring(startPosition + 1, originalId.length()); + } else { + return null; + } + } + + private QAEvent getQAEventFromSOLR(SolrDocument doc) { + QAEvent item = new QAEvent(); + item.setSource((String) doc.get(SOURCE)); + item.setEventId((String) doc.get(EVENT_ID)); + item.setLastUpdate((Date) doc.get(LAST_UPDATE)); + item.setMessage((String) doc.get(MESSAGE)); + item.setOriginalId((String) doc.get(ORIGINAL_ID)); + item.setTarget((String) doc.get(RESOURCE_UUID)); + item.setTitle((String) doc.get(TITLE)); + item.setTopic((String) doc.get(TOPIC)); + item.setTrust((double) doc.get(TRUST)); + item.setRelated((String) doc.get(RELATED_UUID)); + return item; + } + + private boolean isNotSupportedSource(String source) { + return !ArrayUtils.contains(getSupportedSources(), source); + } + + private String[] getSupportedSources() { + return configurationService.getArrayProperty("qaevent.sources", new String[] { QAEvent.OPENAIRE_SOURCE }); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java b/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java index 76ae0cd2d2e6..34ab572d1b16 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java +++ b/dspace-api/src/main/java/org/dspace/rdf/RDFConsumer.java @@ -286,51 +286,54 @@ public void consumeSite(Context ctx, Event event) throws SQLException { @Override public void end(Context ctx) throws Exception { log.debug("Started processing of queued events."); - // create a new context, to be sure to work as anonymous user - // we don't want to store private data in a triplestore with public - // SPARQL endpoint. 
- ctx = new Context(Context.Mode.READ_ONLY); - if (toDelete == null) { - log.debug("Deletion queue does not exists, creating empty queue."); - this.toDelete = new LinkedList<>(); - } - if (toConvert != null) { - log.debug("Starting conversion of DSpaceObjects."); + // store the context mode, set context read only for performance reasons, and restore the old mode + Context.Mode oldMode = ctx.getCurrentMode(); + try { + ctx.setMode(Context.Mode.READ_ONLY); + if (toDelete == null) { + log.debug("Deletion queue does not exists, creating empty queue."); + this.toDelete = new LinkedList<>(); + } + if (toConvert != null) { + log.debug("Starting conversion of DSpaceObjects."); + while (true) { + DSOIdentifier id; + try { + id = toConvert.removeFirst(); + } catch (NoSuchElementException ex) { + break; + } + + if (toDelete.contains(id)) { + log.debug("Skipping " + Constants.typeText[id.type] + " " + + id.id.toString() + " as it is marked for " + + "deletion as well."); + continue; + } + log.debug("Converting " + Constants.typeText[id.type] + " " + + id.id.toString() + "."); + convert(ctx, id); + } + log.debug("Conversion ended."); + } + log.debug("Starting to delete data from the triple store..."); while (true) { DSOIdentifier id; try { - id = toConvert.removeFirst(); + id = toDelete.removeFirst(); } catch (NoSuchElementException ex) { break; } - if (toDelete.contains(id)) { - log.debug("Skipping " + Constants.typeText[id.type] + " " - + id.id.toString() + " as it is marked for " - + "deletion as well."); - continue; - } - log.debug("Converting " + Constants.typeText[id.type] + " " + log.debug("Going to delete data from " + + Constants.typeText[id.type] + " " + id.id.toString() + "."); - convert(ctx, id); + delete(ctx, id); } - log.debug("Conversion ended."); - } - log.debug("Starting to delete data from the triple store..."); - while (true) { - DSOIdentifier id; - try { - id = toDelete.removeFirst(); - } catch (NoSuchElementException ex) { - break; - } - - log.debug("Going 
to delete data from " + - Constants.typeText[id.type] + " " - + id.id.toString() + "."); - delete(ctx, id); + } finally { + // restore context mode + ctx.setMode(oldMode); } - ctx.abort(); log.debug("Deletion finished."); } diff --git a/dspace-api/src/main/java/org/dspace/rdf/RDFUtil.java b/dspace-api/src/main/java/org/dspace/rdf/RDFUtil.java index 03ae589c625d..1e9744aec5c5 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/RDFUtil.java +++ b/dspace-api/src/main/java/org/dspace/rdf/RDFUtil.java @@ -48,7 +48,7 @@ public class RDFUtil { public static final String STORAGE_GRAPHSTORE_PASSWORD_KEY = "rdf.storage.graphstore.password"; /** * Property key to load the URL of the dspace-rdf module. This is necessary - * to create links from the jspui or xmlui to RDF representation of + * to create links from the UI to RDF representation of * DSpaceObjects. */ public static final String CONTEXT_PATH_KEY = "rdf.contextPath"; @@ -289,7 +289,7 @@ public static Model convertAndStore(Context context, DSpaceObject dso) public static void isPublic(Context context, DSpaceObject dso) throws SQLException, ItemNotArchivedException, ItemWithdrawnException, ItemNotDiscoverableException, AuthorizeException { - // as there is no way to set site permissions in XMLUI or JSPUI, we + // as there is no way to set site permissions in UI, we // ignore the permissions of the repository root (DSpaceObject of type // Site). 
if (dso instanceof Site) { diff --git a/dspace-api/src/main/java/org/dspace/rdf/conversion/SimpleDSORelationsConverterPlugin.java b/dspace-api/src/main/java/org/dspace/rdf/conversion/SimpleDSORelationsConverterPlugin.java index 409a4f1518c2..63382a7c26b0 100644 --- a/dspace-api/src/main/java/org/dspace/rdf/conversion/SimpleDSORelationsConverterPlugin.java +++ b/dspace-api/src/main/java/org/dspace/rdf/conversion/SimpleDSORelationsConverterPlugin.java @@ -453,14 +453,13 @@ public Model convertItem(Context context, Item item) } /** - * This methods generataes a link to the provieded Bitstream. + * This methods generates a link to the provided Bitstream. * As bitstreams currently don't get Persistent Identifier in DSpace, we have - * to link them using a link to the repository. This link should work with - * JSPUI and XMLUI (at least it does in DSpace 4.x). + * to link them using a link to the repository. * * @param context The relevant DSpace Context. * @param bitstream Bitstream for which a URL should be generated. - * @return The link to the URL or null if the Bistream is is a Community or + * @return The link to the URL or null if the Bitstream is a Community or * Collection logo. * @throws SQLException if database error */ @@ -476,7 +475,7 @@ public String bitstreamURI(Context context, Bitstream bitstream) String dspaceURL = configurationService.getProperty("dspace.ui.url"); String link = ""; try { - // this currently (DSpace 4.1) works with xmlui and jspui. 
+ // the link to the bitstream in the UI link = dspaceURL + "/bitstream/" + parent.getHandle() + "/" + bitstream.getSequenceID() + "/" + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING); diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceCommandLineParameter.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceCommandLineParameter.java index 2862d014c924..f8b5f23b8c90 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceCommandLineParameter.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceCommandLineParameter.java @@ -14,7 +14,8 @@ import org.apache.commons.lang3.builder.HashCodeBuilder; /** - * This class serves as a representation of a command line parameter by holding a String name and a String value + * This class serves as a representation of a command line parameter by holding + * a String name and a String value. */ public class DSpaceCommandLineParameter { private String name; @@ -23,7 +24,7 @@ public class DSpaceCommandLineParameter { public static String SEPARATOR = "|||"; /** - * This constructor will take a String key and String value and store them in their appriopriate fields + * This constructor will take a String key and String value and store them in their appropriate fields. * @param key The String value to be stored as the name of the parameter * @param value The String value to be stored as the value of the parameter */ @@ -64,9 +65,10 @@ public void setValue(String value) { /** * Converts the DSpaceCommandLineParameter into a String format by concatenating the value and the name String - * values by separating them with a space + * values by separating them with a space. 
* @return The String representation of a DSpaceCommandlineParameter object */ + @Override public String toString() { String stringToReturn = ""; stringToReturn += getName(); @@ -92,7 +94,7 @@ public static String concatenate(List parameterList) } /** - * Will return a boolean indicating whether the given param is equal to this object + * Will return a boolean indicating whether the given parameter is equal to this object. * @param other The other object * @return A boolean indicating equality */ @@ -101,7 +103,7 @@ public boolean equals(Object other) { if (other == null) { return false; } - if (other.getClass() != DSpaceCommandLineParameter.class) { + if (!(other instanceof DSpaceCommandLineParameter)) { return false; } return StringUtils.equals(this.getName(), ((DSpaceCommandLineParameter) other).getName()) && StringUtils diff --git a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java index d0fffdb57dab..2ea0a52d6e34 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java +++ b/dspace-api/src/main/java/org/dspace/scripts/DSpaceRunnable.java @@ -8,7 +8,7 @@ package org.dspace.scripts; import java.io.InputStream; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; import java.util.UUID; @@ -18,6 +18,7 @@ import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang3.StringUtils; +import org.dspace.cli.DSpaceSkipUnknownArgumentsParser; import org.dspace.eperson.EPerson; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.handler.DSpaceRunnableHandler; @@ -37,7 +38,12 @@ public abstract class DSpaceRunnable implements R protected CommandLine commandLine; /** - * This EPerson identifier variable is the uuid of the eperson that's running the script + * The minimal CommandLine object for the script that'll hold help information + */ + protected CommandLine 
helpCommandLine; + + /** + * This EPerson identifier variable is the UUID of the EPerson that's running the script */ private UUID epersonIdentifier; @@ -64,26 +70,66 @@ private void setHandler(DSpaceRunnableHandler dSpaceRunnableHandler) { * @param args The arguments given to the script * @param dSpaceRunnableHandler The DSpaceRunnableHandler object that defines from where the script was ran * @param currentUser + * @return the result of this step; StepResult.Continue: continue the normal process, + * initialize is successful; otherwise exit the process (the help or version is shown) * @throws ParseException If something goes wrong */ - public void initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, + public StepResult initialize(String[] args, DSpaceRunnableHandler dSpaceRunnableHandler, EPerson currentUser) throws ParseException { if (currentUser != null) { this.setEpersonIdentifier(currentUser.getID()); } this.setHandler(dSpaceRunnableHandler); - this.parse(args); + + // parse the command line in a first step for the help options + // --> no other option is required + StepResult result = this.parseForHelp(args); + switch (result) { + case Exit: + // arguments of the command line matches the help options, handle this + handleHelpCommandLine(); + break; + + case Continue: + // arguments of the command line matches NOT the help options, parse the args for the normal options + result = this.parse(args); + break; + default: + break; + } + + return result; + } + + + /** + * This method handle the help command line. In this easy implementation only the help is printed. For more + * complexity override this method. 
+ */ + private void handleHelpCommandLine() { + printHelp(); } + /** * This method will take the primitive array of String objects that represent the parameters given to the String * and it'll parse these into a CommandLine object that can be used by the script to retrieve the data * @param args The primitive array of Strings representing the parameters * @throws ParseException If something goes wrong */ - private void parse(String[] args) throws ParseException { + private StepResult parse(String[] args) throws ParseException { commandLine = new DefaultParser().parse(getScriptConfiguration().getOptions(), args); setup(); + return StepResult.Continue; + } + + private StepResult parseForHelp(String[] args) throws ParseException { + helpCommandLine = new DSpaceSkipUnknownArgumentsParser().parse(getScriptConfiguration().getHelpOptions(), args); + if (helpCommandLine.getOptions() != null && helpCommandLine.getOptions().length > 0) { + return StepResult.Exit; + } + + return StepResult.Continue; } /** @@ -129,7 +175,7 @@ public void printHelp() { * @return The list of Strings representing filenames from the options given to the script */ public List getFileNamesFromInputStreamOptions() { - List fileNames = new LinkedList<>(); + List fileNames = new ArrayList<>(); for (Option option : getScriptConfiguration().getOptions().getOptions()) { if (option.getType() == InputStream.class && @@ -151,11 +197,15 @@ public UUID getEpersonIdentifier() { } /** - * Generic setter for the epersonIdentifier - * This EPerson identifier variable is the uuid of the eperson that's running the script + * Generic setter for the epersonIdentifier. + * This EPerson identifier variable is the UUID of the EPerson that's running the script. 
* @param epersonIdentifier The epersonIdentifier to be set on this DSpaceRunnable */ public void setEpersonIdentifier(UUID epersonIdentifier) { this.epersonIdentifier = epersonIdentifier; } + + public enum StepResult { + Continue, Exit; + } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index 574ba5976051..eab3ba460c09 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -7,9 +7,10 @@ */ package org.dspace.scripts; +import java.util.ArrayList; import java.util.Date; -import java.util.LinkedList; import java.util.List; +import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; @@ -20,6 +21,7 @@ import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; +import javax.persistence.Lob; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.SequenceGenerator; @@ -33,6 +35,8 @@ import org.dspace.content.ProcessStatus; import org.dspace.core.ReloadableEntity; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.hibernate.annotations.Type; /** * This class is the DB Entity representation of the Process object to be stored in the Database @@ -66,6 +70,8 @@ public class Process implements ReloadableEntity { @Enumerated(EnumType.STRING) private ProcessStatus processStatus; + @Lob + @Type(type = "org.hibernate.type.TextType") @Column(name = "parameters") private String parameters; @@ -77,6 +83,17 @@ public class Process implements ReloadableEntity { ) private List bitstreams; + /* + * Special Groups associated with this Process + */ + @ManyToMany(fetch = FetchType.LAZY, cascade = {CascadeType.PERSIST}) + @JoinTable( + name = "process2group", + joinColumns = {@JoinColumn(name = "process_id")}, + inverseJoinColumns = 
{@JoinColumn(name = "group_id")} + ) + private List groups; + @Column(name = "creation_time", nullable = false) @Temporal(TemporalType.TIMESTAMP) private Date creationTime; @@ -91,6 +108,7 @@ protected Process() { * This method returns the ID that the Process holds within the Database * @return The ID that the process holds within the database */ + @Override public Integer getID() { return processId; } @@ -162,7 +180,8 @@ public void setProcessStatus(ProcessStatus processStatus) { /** * To get the parameters, use ProcessService.getParameters() to get a parsed list of DSpaceCommandLineParameters - * This String representation is the parameter in an unparsed fashion. For example "-c test" + * This String representation is the parameter in an unparsed fashion.For example "-c test" + * @return the raw parameter string. */ protected String getParameters() { return parameters; @@ -179,7 +198,7 @@ public void setParameters(String parameters) { */ public List getBitstreams() { if (bitstreams == null) { - bitstreams = new LinkedList<>(); + bitstreams = new ArrayList<>(); } return bitstreams; } @@ -209,6 +228,21 @@ public Date getCreationTime() { return creationTime; } + /** + * This method will return the special groups associated with the Process. + */ + public List getGroups() { + return groups; + } + + /** + * This method sets the special groups associated with the Process. + * @param groups The special groups of this process. 
+ */ + public void setGroups(List groups) { + this.groups = groups; + } + /** * Return true if other is the same Process * as this object, false otherwise diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index aa193f30bc8b..2e14aeaa36c0 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -21,6 +21,7 @@ import java.util.Date; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; @@ -41,8 +42,9 @@ import org.dspace.content.service.MetadataFieldService; import org.dspace.core.Constants; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.eperson.service.EPersonService; import org.dspace.scripts.service.ProcessService; import org.springframework.beans.factory.annotation.Autowired; @@ -74,15 +76,23 @@ public class ProcessServiceImpl implements ProcessService { @Override public Process create(Context context, EPerson ePerson, String scriptName, - List parameters) throws SQLException { + List parameters, + final Set specialGroups) throws SQLException { Process process = new Process(); process.setEPerson(ePerson); process.setName(scriptName); process.setParameters(DSpaceCommandLineParameter.concatenate(parameters)); process.setCreationTime(new Date()); + Optional.ofNullable(specialGroups) + .ifPresent(sg -> { + // we use a set to be sure no duplicated special groups are stored with process + Set specialGroupsSet = new HashSet<>(sg); + process.setGroups(new ArrayList<>(specialGroupsSet)); + }); + Process createdProcess = processDAO.create(context, process); - log.info(LogManager.getHeader(context, "process_create", + log.info(LogHelper.getHeader(context, 
"process_create", "Process has been created for eperson with email " + ePerson.getEmail() + " with ID " + createdProcess.getID() + " and scriptName " + scriptName + " and parameters " + parameters)); @@ -119,12 +129,17 @@ public List findAllSortByStartTime(Context context) throws SQLException return processes; } + @Override + public List findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException { + return processDAO.findByUser(context, eperson, limit, offset); + } + @Override public void start(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.RUNNING); process.setStartTime(new Date()); update(context, process); - log.info(LogManager.getHeader(context, "process_start", "Process with ID " + process.getID() + log.info(LogHelper.getHeader(context, "process_start", "Process with ID " + process.getID() + " and name " + process.getName() + " has started")); } @@ -134,7 +149,7 @@ public void fail(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.FAILED); process.setFinishedTime(new Date()); update(context, process); - log.info(LogManager.getHeader(context, "process_fail", "Process with ID " + process.getID() + log.info(LogHelper.getHeader(context, "process_fail", "Process with ID " + process.getID() + " and name " + process.getName() + " has failed")); } @@ -144,7 +159,7 @@ public void complete(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.COMPLETED); process.setFinishedTime(new Date()); update(context, process); - log.info(LogManager.getHeader(context, "process_complete", "Process with ID " + process.getID() + log.info(LogHelper.getHeader(context, "process_complete", "Process with ID " + process.getID() + " and name " + process.getName() + " has been completed")); } @@ -177,7 +192,7 @@ public void delete(Context context, Process process) throws SQLException, IOExce bitstreamService.delete(context, 
bitstream); } processDAO.delete(context, process); - log.info(LogManager.getHeader(context, "process_delete", "Process with ID " + process.getID() + log.info(LogHelper.getHeader(context, "process_delete", "Process with ID " + process.getID() + " and name " + process.getName() + " has been deleted")); } @@ -295,6 +310,17 @@ public void createLogBitstream(Context context, Process process) tempFile.delete(); } + @Override + public List findByStatusAndCreationTimeOlderThan(Context context, List statuses, + Date date) throws SQLException { + return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); + } + + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + return processDAO.countByUser(context, user); + } + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder sb = new StringBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index 4eb7cdbbc164..abb700cb10c9 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -8,6 +8,7 @@ package org.dspace.scripts; import java.lang.reflect.InvocationTargetException; +import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -36,7 +37,9 @@ public ScriptConfiguration getScriptConfiguration(String name) { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)).collect(Collectors.toList()); + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) + 
.sorted(Comparator.comparing(ScriptConfiguration::getName)) + .collect(Collectors.toList()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f444a..bbedab04e278 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -7,17 +7,29 @@ */ package org.dspace.scripts.configuration; +import java.sql.SQLException; +import java.util.List; + +import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceRunnable; import org.springframework.beans.factory.BeanNameAware; +import org.springframework.beans.factory.annotation.Autowired; /** * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this - * and represent a script's configuration + * and represent a script's configuration. + * By default script are available only to repository administrators script that have a broader audience + * must override the {@link #isAllowedToExecute(Context, List)} method. */ public abstract class ScriptConfiguration implements BeanNameAware { + @Autowired + protected AuthorizeService authorizeService; + /** * The possible options for this script */ @@ -70,14 +82,23 @@ public void setName(String name) { * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration */ public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** * This method will return if the script is allowed to execute in the given context. 
This is by default set * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ - public abstract boolean isAllowedToExecute(Context context); + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } /** * The getter for the options of the Script @@ -85,6 +106,19 @@ public void setName(String name) { */ public abstract Options getOptions(); + /** + * The getter for the options of the Script (help informations) + * + * @return the options value of this ScriptConfiguration for help + */ + public Options getHelpOptions() { + Options options = new Options(); + + options.addOption(Option.builder("h").longOpt("help").desc("help").hasArg(false).required(false).build()); + + return options; + } + @Override public void setBeanName(String beanName) { this.name = beanName; diff --git a/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java b/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java index 078ba6bfa2e6..223a73dad739 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java +++ b/dspace-api/src/main/java/org/dspace/scripts/handler/DSpaceRunnableHandler.java @@ -10,7 +10,9 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.List; import java.util.Optional; +import java.util.UUID; import org.apache.commons.cli.Options; import org.dspace.authorize.AuthorizeException; @@ -77,6 +79,13 @@ public 
interface DSpaceRunnableHandler { */ public void logError(String message); + /** + * This method will perform the error logging of the message given along with a stack trace + * @param message The message to be logged as an error + * @param throwable The original exception + */ + public void logError(String message, Throwable throwable); + /** * This method will print the help for the options and name * @param options The options for the script @@ -107,4 +116,12 @@ public interface DSpaceRunnableHandler { */ public void writeFilestream(Context context, String fileName, InputStream inputStream, String type) throws IOException, SQLException, AuthorizeException; + + /** + * This method will return a List of UUIDs for the special groups + * associated with the processId contained by specific implementations of this interface. + * Otherwise, it returns an empty collection. + * @return List containing UUIDs of Special Groups of the associated Process. + */ + public List getSpecialGroups(); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java b/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java index 6775b9a455d5..8a7f41d9582d 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java +++ b/dspace-api/src/main/java/org/dspace/scripts/handler/impl/CommandLineDSpaceRunnableHandler.java @@ -10,7 +10,10 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.util.Collections; +import java.util.List; import java.util.Optional; +import java.util.UUID; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; @@ -84,6 +87,12 @@ public void logError(String message) { log.error(message); } + @Override + public void logError(String message, Throwable throwable) { + System.err.println(message); + log.error(message, throwable); + } + @Override public void printHelp(Options 
options, String name) { if (options != null) { @@ -107,4 +116,9 @@ public void writeFilestream(Context context, String fileName, InputStream inputS File file = new File(fileName); FileUtils.copyInputStreamToFile(inputStream, file); } + + @Override + public List getSpecialGroups() { + return Collections.emptyList(); + } } diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java index 27c0c75a35a7..c6fc24888155 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java @@ -10,12 +10,16 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.Date; import java.util.List; +import java.util.Set; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Bitstream; +import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessLogLevel; @@ -32,11 +36,14 @@ public interface ProcessService { * @param ePerson The ePerson for which this process will be created on * @param scriptName The script name to be used for the process * @param parameters The parameters to be used for the process + * @param specialGroups Allows to set special groups, associated with application context when process is created, + * other than the ones derived from the eperson membership. 
* @return The created process * @throws SQLException If something goes wrong */ public Process create(Context context, EPerson ePerson, String scriptName, - List parameters) throws SQLException; + List parameters, + final Set specialGroups) throws SQLException; /** * This method will retrieve a Process object from the Database with the given ID @@ -235,4 +242,39 @@ List search(Context context, ProcessQueryParameterContainer processQuer */ void createLogBitstream(Context context, Process process) throws IOException, SQLException, AuthorizeException; + + /** + * Find all the processes with one of the given status and with a creation time + * older than the specified date. + * + * @param context The relevant DSpace context + * @param statuses the statuses of the processes to search for + * @param date the creation date to search for + * @return The list of all Processes which match requirements + * @throws AuthorizeException If something goes wrong + */ + List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) + throws SQLException; + + /** + * Returns a list of all Process objects in the database by the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which is related to the given user. 
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/search/Harvest.java b/dspace-api/src/main/java/org/dspace/search/Harvest.java index a0b3698592a5..773d45a6ab29 100644 --- a/dspace-api/src/main/java/org/dspace/search/Harvest.java +++ b/dspace-api/src/main/java/org/dspace/search/Harvest.java @@ -11,7 +11,6 @@ import java.text.ParseException; import java.util.ArrayList; import java.util.Iterator; -import java.util.LinkedList; import java.util.List; import org.apache.logging.log4j.Logger; @@ -43,7 +42,6 @@ * withdrawn within a particular range of dates. * * @author Robert Tansley - * @version $Revision$ */ public class Harvest { /** @@ -129,7 +127,7 @@ public static List harvest(Context context, DSpaceObject scop // several smaller operations (e.g. for OAI resumption tokens.) discoverQuery.setSortField("search.resourceid", DiscoverQuery.SORT_ORDER.asc); - List infoObjects = new LinkedList(); + List infoObjects = new ArrayList<>(); // Count of items read from the record set that match the selection criteria. // Note : Until 'index > offset' the records are not added to the output set. 
@@ -155,7 +153,7 @@ public static List harvest(Context context, DSpaceObject scop if (collections) { // Add collections data - fillCollections(context, itemInfo); + fillCollections(itemInfo); } if (items) { @@ -163,7 +161,7 @@ public static List harvest(Context context, DSpaceObject scop itemInfo.item = itemService.find(context, itemInfo.itemID); } - if ((nonAnon) || (itemInfo.item == null) || (withdrawn && itemInfo.withdrawn)) { + if (nonAnon || (itemInfo.item == null) || (withdrawn && itemInfo.withdrawn)) { index++; if (index > offset) { infoObjects.add(itemInfo); @@ -221,7 +219,7 @@ public static HarvestedItemInfo getSingle(Context context, String handle, // Get the sets if (collections) { - fillCollections(context, itemInfo); + fillCollections(itemInfo); } return itemInfo; @@ -230,12 +228,10 @@ public static HarvestedItemInfo getSingle(Context context, String handle, /** * Fill out the containers field of the HarvestedItemInfo object * - * @param context DSpace context * @param itemInfo HarvestedItemInfo object to fill out * @throws SQLException if database error */ - private static void fillCollections(Context context, - HarvestedItemInfo itemInfo) throws SQLException { + private static void fillCollections(HarvestedItemInfo itemInfo) throws SQLException { // Get the collection Handles from DB List collections = itemInfo.item.getCollections(); itemInfo.collectionHandles = new ArrayList<>(); diff --git a/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java b/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java index f63a7a4f9197..e83aa93e3362 100644 --- a/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/service/impl/ClientInfoServiceImpl.java @@ -7,8 +7,12 @@ */ package org.dspace.service.impl; +import static org.apache.commons.lang3.StringUtils.ordinalIndexOf; + +import java.net.Inet4Address; import javax.servlet.http.HttpServletRequest; +import 
com.google.common.net.InetAddresses; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.core.Utils; @@ -66,6 +70,13 @@ public String getClientIp(String remoteIp, String xForwardedForHeaderValue) { "To trust X-Forwarded-For headers, set useProxies=true."); } + if (isIPv4Address(ip)) { + int ipAnonymizationBytes = getIpAnonymizationBytes(); + if (ipAnonymizationBytes > 0) { + ip = anonymizeIpAddress(ip, ipAnonymizationBytes); + } + } + return ip; } @@ -139,7 +150,7 @@ private IPTable parseTrustedProxyRanges() { // If our IPTable is not empty, log the trusted proxies and return it if (!ipTable.isEmpty()) { - log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable.toSet().toString()); + log.info("Trusted proxies (configure via 'proxies.trusted.ipranges'): {}", ipTable); return ipTable; } else { return null; @@ -192,4 +203,38 @@ private String getXForwardedForIpValue(String remoteIp, String xForwardedForValu return ip; } + + /** + * Anonymize the given IP address by setting the last specified bytes to 0 + * @param ipAddress the ip address to be anonymize + * @param bytes the number of bytes to be set to 0 + * @return the modified ip address + */ + private String anonymizeIpAddress(String ipAddress, int bytes) { + + if (bytes > 4) { + log.warn("It is not possible to anonymize " + bytes + " bytes of an IPv4 address."); + return ipAddress; + } + + if (bytes == 4) { + return "0.0.0.0"; + } + + String zeroSuffix = StringUtils.repeat(".0", bytes); + return removeLastBytes(ipAddress, bytes) + zeroSuffix; + + } + + private String removeLastBytes(String ipAddress, int bytes) { + return ipAddress.substring(0, ordinalIndexOf(ipAddress, ".", 4 - bytes)); + } + + private int getIpAnonymizationBytes() { + return configurationService.getIntProperty("client.ip-anonymization.parts", 0); + } + + private boolean isIPv4Address(String ipAddress) { + return InetAddresses.forString(ipAddress) instanceof 
Inet4Address; + } } diff --git a/dspace-api/src/main/java/org/dspace/service/impl/HttpConnectionPoolService.java b/dspace-api/src/main/java/org/dspace/service/impl/HttpConnectionPoolService.java new file mode 100644 index 000000000000..c5f7c46b586e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/service/impl/HttpConnectionPoolService.java @@ -0,0 +1,196 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.service.impl; + +import java.util.concurrent.TimeUnit; +import javax.annotation.PostConstruct; +import javax.inject.Inject; +import javax.inject.Named; +import javax.inject.Singleton; + +import org.apache.http.HeaderElement; +import org.apache.http.HeaderElementIterator; +import org.apache.http.HttpResponse; +import org.apache.http.conn.ConnectionKeepAliveStrategy; +import org.apache.http.conn.HttpClientConnectionManager; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.message.BasicHeaderElementIterator; +import org.apache.http.protocol.HTTP; +import org.apache.http.protocol.HttpContext; +import org.dspace.services.ConfigurationService; + +/** + * Factory for HTTP clients sharing a pool of connections. + * + *

    You may create multiple pools. Each is identified by a configuration + * "prefix" (passed to the constructor) which is used to create names of + * properties which will configure the pool. The properties are: + * + *

    + *
    PREFIX.client.keepAlive
    + *
    Default keep-alive time for open connections, in milliseconds
    + *
    PREFIX.client.maxTotalConnections
    + *
    maximum open connections
    + *
    PREFIX.client.maxPerRoute
    + *
    maximum open connections per service instance
    + *
    PREFIX.client.timeToLive
    + *
    maximum lifetime of a pooled connection, in seconds
    + *
    + * + * @author Mark H. Wood + */ +@Named +@Singleton +public class HttpConnectionPoolService { + @Inject + ConfigurationService configurationService; + + /** Configuration properties will begin with this string. */ + private final String configPrefix; + + /** Maximum number of concurrent pooled connections. */ + private static final int DEFAULT_MAX_TOTAL_CONNECTIONS = 20; + + /** Maximum number of concurrent pooled connections per route. */ + private static final int DEFAULT_MAX_PER_ROUTE = 15; + + /** Keep connections open at least this long, if the response did not + * specify: milliseconds + */ + private static final int DEFAULT_KEEPALIVE = 5 * 1000; + + /** Pooled connection maximum lifetime: seconds */ + private static final int DEFAULT_TTL = 10 * 60; + + /** Clean up stale connections this often: milliseconds */ + private static final int CHECK_INTERVAL = 1000; + + /** Connection idle if unused for this long: seconds */ + private static final int IDLE_INTERVAL = 30; + + private PoolingHttpClientConnectionManager connManager; + + private final ConnectionKeepAliveStrategy keepAliveStrategy + = new KeepAliveStrategy(); + + /** + * Construct a pool for a given set of configuration properties. + * + * @param configPrefix Configuration property names will begin with this. 
+ */ + public HttpConnectionPoolService(String configPrefix) { + this.configPrefix = configPrefix; + } + + @PostConstruct + protected void init() { + connManager = new PoolingHttpClientConnectionManager( + configurationService.getIntProperty(configPrefix + ".client.timeToLive", DEFAULT_TTL), + TimeUnit.SECONDS); + + connManager.setMaxTotal(configurationService.getIntProperty( + configPrefix + ".client.maxTotalConnections", DEFAULT_MAX_TOTAL_CONNECTIONS)); + connManager.setDefaultMaxPerRoute( + configurationService.getIntProperty(configPrefix + ".client.maxPerRoute", + DEFAULT_MAX_PER_ROUTE)); + + Thread connectionMonitor = new IdleConnectionMonitorThread(connManager); + connectionMonitor.setDaemon(true); + connectionMonitor.start(); + } + + /** + * Create an HTTP client which uses a pooled connection. + * + * @return the client. + */ + public CloseableHttpClient getClient() { + CloseableHttpClient httpClient = HttpClientBuilder.create() + .setKeepAliveStrategy(keepAliveStrategy) + .setConnectionManager(connManager) + .build(); + return httpClient; + } + + /** + * A connection keep-alive strategy that obeys the Keep-Alive header and + * applies a default if none is given. + * + * Swiped from https://www.baeldung.com/httpclient-connection-management + */ + public class KeepAliveStrategy + implements ConnectionKeepAliveStrategy { + @Override + public long getKeepAliveDuration(HttpResponse response, + HttpContext context) { + HeaderElementIterator it = new BasicHeaderElementIterator( + response.headerIterator(HTTP.CONN_KEEP_ALIVE)); + while (it.hasNext()) { + HeaderElement he = it.nextElement(); + String name = he.getName(); + String value = he.getValue(); + if (value != null && "timeout".equalsIgnoreCase(name)) { + return Long.parseLong(value) * 1000; + } + } + + // If server did not request keep-alive, use configured value. 
+ return configurationService.getIntProperty(configPrefix + ".client.keepAlive", + DEFAULT_KEEPALIVE); + } + } + + /** + * Clean up stale connections. + * + * Swiped from https://www.baeldung.com/httpclient-connection-management + */ + public class IdleConnectionMonitorThread + extends Thread { + private final HttpClientConnectionManager connMgr; + private volatile boolean shutdown; + + /** + * Constructor. + * + * @param connMgr the manager to be monitored. + */ + public IdleConnectionMonitorThread( + PoolingHttpClientConnectionManager connMgr) { + super(); + this.connMgr = connMgr; + } + + @Override + public void run() { + try { + while (!shutdown) { + synchronized (this) { + wait(CHECK_INTERVAL); + connMgr.closeExpiredConnections(); + connMgr.closeIdleConnections(IDLE_INTERVAL, TimeUnit.SECONDS); + } + } + } catch (InterruptedException ex) { + shutdown(); + } + } + + /** + * Cause a controlled exit from the thread. + */ + public void shutdown() { + shutdown = true; + synchronized (this) { + notifyAll(); + } + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java index 4b3e1886627f..fdaaef98b5d6 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormat.java @@ -86,23 +86,23 @@ public static String makeSortString(String value, String language, String type) } // No delegates found, so apply defaults - if (type.equalsIgnoreCase(OrderFormat.AUTHOR) && authorDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.AUTHOR)) { return authorDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.TITLE) && titleDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.TITLE)) { return titleDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.TEXT) && textDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.TEXT)) { return 
textDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.DATE) && dateDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.DATE)) { return dateDelegate.makeSortString(value, language); } - if (type.equalsIgnoreCase(OrderFormat.AUTHORITY) && authorityDelegate != null) { + if (type.equalsIgnoreCase(OrderFormat.AUTHORITY)) { return authorityDelegate.makeSortString(value, language); } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatAuthor.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatAuthor.java index a38530606e47..d0fdeba112c9 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatAuthor.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatAuthor.java @@ -9,6 +9,7 @@ import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.LowerCaseAndTrim; +import org.dspace.text.filter.StripDiacritics; import org.dspace.text.filter.TextFilter; /** @@ -19,6 +20,7 @@ public class OrderFormatAuthor extends AbstractTextFilterOFD { { filters = new TextFilter[] {new DecomposeDiactritics(), + new StripDiacritics(), new LowerCaseAndTrim()}; } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatText.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatText.java index 403034f675c5..e108328687df 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatText.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatText.java @@ -9,6 +9,7 @@ import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.LowerCaseAndTrim; +import org.dspace.text.filter.StripDiacritics; import org.dspace.text.filter.TextFilter; /** @@ -19,6 +20,7 @@ public class OrderFormatText extends AbstractTextFilterOFD { { filters = new TextFilter[] {new DecomposeDiactritics(), + new StripDiacritics(), new LowerCaseAndTrim()}; } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java 
b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java index eb3586dc616c..b745f0719cb7 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitle.java @@ -10,6 +10,7 @@ import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.LowerCaseAndTrim; import org.dspace.text.filter.StandardInitialArticleWord; +import org.dspace.text.filter.StripDiacritics; import org.dspace.text.filter.TextFilter; /** @@ -21,6 +22,7 @@ public class OrderFormatTitle extends AbstractTextFilterOFD { { filters = new TextFilter[] {new StandardInitialArticleWord(), new DecomposeDiactritics(), + new StripDiacritics(), new LowerCaseAndTrim()}; } } diff --git a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java index 670e5c87e591..fa9ba297258a 100644 --- a/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java +++ b/dspace-api/src/main/java/org/dspace/sort/OrderFormatTitleMarc21.java @@ -10,6 +10,7 @@ import org.dspace.text.filter.DecomposeDiactritics; import org.dspace.text.filter.LowerCaseAndTrim; import org.dspace.text.filter.MARC21InitialArticleWord; +import org.dspace.text.filter.StripDiacritics; import org.dspace.text.filter.StripLeadingNonAlphaNum; import org.dspace.text.filter.TextFilter; @@ -22,6 +23,7 @@ public class OrderFormatTitleMarc21 extends AbstractTextFilterOFD { { filters = new TextFilter[] {new MARC21InitialArticleWord(), new DecomposeDiactritics(), + new StripDiacritics(), new StripLeadingNonAlphaNum(), new LowerCaseAndTrim()}; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/AnonymizeStatistics.java b/dspace-api/src/main/java/org/dspace/statistics/AnonymizeStatistics.java index 279b2f42150d..ef2a612133c9 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/AnonymizeStatistics.java +++ 
b/dspace-api/src/main/java/org/dspace/statistics/AnonymizeStatistics.java @@ -16,7 +16,7 @@ import static org.apache.commons.cli.Option.builder; import static org.apache.commons.lang.time.DateFormatUtils.format; import static org.apache.logging.log4j.LogManager.getLogger; -import static org.dspace.core.LogManager.getHeader; +import static org.dspace.core.LogHelper.getHeader; import static org.dspace.statistics.SolrLoggerServiceImpl.DATE_FORMAT_8601; import java.io.IOException; diff --git a/dspace-api/src/main/java/org/dspace/statistics/DataTermsFacet.java b/dspace-api/src/main/java/org/dspace/statistics/DataTermsFacet.java deleted file mode 100644 index 9de06b7bb8e5..000000000000 --- a/dspace-api/src/main/java/org/dspace/statistics/DataTermsFacet.java +++ /dev/null @@ -1,70 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.statistics; - -import java.util.ArrayList; -import java.util.List; - -import com.google.gson.Gson; - -/** - * A neutral data object to hold data for statistics. - */ -public class DataTermsFacet { - private List terms; - - public DataTermsFacet() { - terms = new ArrayList(); - } - - public void addTermFacet(TermsFacet termsFacet) { - terms.add(termsFacet); - } - - /** - * Render this data object into JSON format. - * - * An example of the output could be of the format: - * [{"term":"247166","count":10},{"term":"247168","count":6}] - * - * @return JSON-formatted data. 
- */ - public String toJson() { - Gson gson = new Gson(); - return gson.toJson(terms); - } - - - public static class TermsFacet { - private String term; - private Integer count; - - public TermsFacet(String term, Integer count) { - setTerm(term); - setCount(count); - } - - public String getTerm() { - return term; - } - - public void setTerm(String term) { - this.term = term; - } - - public Integer getCount() { - return count; - } - - public void setCount(Integer count) { - this.count = count; - } - - - } -} diff --git a/dspace-api/src/main/java/org/dspace/statistics/Dataset.java b/dspace-api/src/main/java/org/dspace/statistics/Dataset.java index 307b6e40ad49..859a746e616f 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/Dataset.java +++ b/dspace-api/src/main/java/org/dspace/statistics/Dataset.java @@ -10,6 +10,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.text.ParseException; import java.util.ArrayList; @@ -24,7 +25,7 @@ /** * @author kevinvandevelde at atmire.com - * Date: 21-jan-2009 + * Date: 21-Jan-2009 * Time: 13:44:48 */ public class Dataset { @@ -66,20 +67,20 @@ public Dataset(String[][] matrix) { } private void initRowLabels(int rows) { - rowLabels = new ArrayList(rows); - rowLabelsAttrs = new ArrayList>(); + rowLabels = new ArrayList<>(rows); + rowLabelsAttrs = new ArrayList<>(); for (int i = 0; i < rows; i++) { rowLabels.add("Row " + (i + 1)); - rowLabelsAttrs.add(new HashMap()); + rowLabelsAttrs.add(new HashMap<>()); } } private void initColumnLabels(int nbCols) { - colLabels = new ArrayList(nbCols); - colLabelsAttrs = new ArrayList>(); + colLabels = new ArrayList<>(nbCols); + colLabelsAttrs = new ArrayList<>(); for (int i = 0; i < nbCols; i++) { colLabels.add("Column " + (i + 1)); - colLabelsAttrs.add(new HashMap()); + colLabelsAttrs.add(new HashMap<>()); } } @@ -233,7 +234,9 @@ public void 
flipRowCols() { public ByteArrayOutputStream exportAsCSV() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ICSVWriter ecsvp = new CSVWriterBuilder(new OutputStreamWriter(baos)).withSeparator(';').build(); + ICSVWriter ecsvp = new CSVWriterBuilder(new OutputStreamWriter(baos, StandardCharsets.UTF_8)) + .withSeparator(';') + .build(); //Generate the item row List colLabels = getColLabels(); colLabels.add(0, ""); diff --git a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java new file mode 100644 index 000000000000..40fea6cf54da --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; + +import com.maxmind.geoip2.DatabaseReader; +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Service that handle the GeoIP database file. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GeoIpService { + + @Autowired + private ConfigurationService configurationService; + + /** + * Returns an instance of {@link DatabaseReader} based on the configured db + * file, if any. 
+ * + * @return the Database reader + * @throws IllegalStateException if the db file is not configured correctly + */ + public DatabaseReader getDatabaseReader() throws IllegalStateException { + String dbPath = configurationService.getProperty("usage-statistics.dbfile"); + if (StringUtils.isBlank(dbPath)) { + throw new IllegalStateException("The required 'dbfile' configuration is missing in usage-statistics.cfg!"); + } + + try { + File dbFile = new File(dbPath); + return new DatabaseReader.Builder(dbFile).build(); + } catch (FileNotFoundException fe) { + throw new IllegalStateException( + "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " + + "based reports! Please see the DSpace installation instructions for instructions to install " + + "this file.",fe); + } catch (IOException e) { + throw new IllegalStateException( + "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " + + "DSpace installation instructions for more details.", e); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 9cc032a998b9..5f976bbfd94b 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -8,7 +8,6 @@ package org.dspace.statistics; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; @@ -18,9 +17,12 @@ import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; +import java.net.URI; import java.net.URLEncoder; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; import java.sql.SQLException; import java.text.DateFormat; import java.text.ParseException; @@ -79,6 
+81,7 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.util.NamedList; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; @@ -142,6 +145,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea private ClientInfoService clientInfoService; @Autowired private SolrStatisticsCore solrStatisticsCore; + @Autowired + private GeoIpService geoIpService; + @Autowired + private AuthorizeService authorizeService; /** URL to the current-year statistics core. Prior-year shards will have a year suffixed. */ private String statisticsCoreURL; @@ -173,32 +180,29 @@ protected SolrLoggerServiceImpl() { @Override public void afterPropertiesSet() throws Exception { + statisticsCoreURL = configurationService.getProperty("solr-statistics.server"); + + if (null != statisticsCoreURL) { + Path statisticsPath = Paths.get(new URI(statisticsCoreURL).getPath()); + statisticsCoreBase = statisticsPath + .getName(statisticsPath.getNameCount() - 1) + .toString(); + } else { + log.warn("Unable to find solr-statistics.server parameter in DSpace configuration. This is required for " + + "sharding statistics."); + statisticsCoreBase = null; + } + solr = solrStatisticsCore.getSolr(); // Read in the file so we don't have to do it all the time //spiderIps = SpiderDetector.getSpiderIpAddresses(); DatabaseReader service = null; - // Get the db file for the location - String dbPath = configurationService.getProperty("usage-statistics.dbfile"); - if (dbPath != null) { - try { - File dbFile = new File(dbPath); - service = new DatabaseReader.Builder(dbFile).build(); - } catch (FileNotFoundException fe) { - log.error( - "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " + - "based reports! 
Please see the DSpace installation instructions for instructions to install " + - "this file.", - fe); - } catch (IOException e) { - log.error( - "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " + - "DSpace installation instructions for more details.", - e); - } - } else { - log.error("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + try { + service = geoIpService.getDatabaseReader(); + } catch (IllegalStateException ex) { + log.error(ex); } locationService = service; } @@ -212,14 +216,30 @@ public void post(DSpaceObject dspaceObject, HttpServletRequest request, @Override public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser) { - if (solr == null || locationService == null) { + postView(dspaceObject, request, currentUser, null); + } + + @Override + public void postView(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer) { + Context context = new Context(); + // Do not record statistics for Admin users + try { + if (authorizeService.isAdmin(context, currentUser)) { + return; + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + + if (solr == null) { return; } initSolrYearCores(); try { - SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser); + SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, request, currentUser, referrer); if (doc1 == null) { return; } @@ -253,14 +273,20 @@ public void postView(DSpaceObject dspaceObject, HttpServletRequest request, @Override public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser) { - if (solr == null || locationService == null) { + postView(dspaceObject, ip, userAgent, xforwardedfor, currentUser, null); + } + + @Override + public void postView(DSpaceObject dspaceObject, + String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer) { + 
if (solr == null) { return; } initSolrYearCores(); try { SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, ip, userAgent, xforwardedfor, - currentUser); + currentUser, referrer); if (doc1 == null) { return; } @@ -301,6 +327,22 @@ public void postView(DSpaceObject dspaceObject, */ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser) throws SQLException { + return getCommonSolrDoc(dspaceObject, request, currentUser, null); + } + + /** + * Returns a solr input document containing common information about the statistics + * regardless if we are logging a search or a view of a DSpace object + * + * @param dspaceObject the object used. + * @param request the current request context. + * @param currentUser the current session's user. + * @param referrer the optional referrer. + * @return a solr input document + * @throws SQLException in case of a database exception + */ + protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer) throws SQLException { boolean isSpiderBot = request != null && SpiderDetector.isSpider(request); if (isSpiderBot && !configurationService.getBooleanProperty("usage-statistics.logBots", true)) { @@ -323,7 +365,9 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServ } //Also store the referrer - if (request.getHeader("referer") != null) { + if (referrer != null) { + doc1.addField("referrer", referrer); + } else if (request.getHeader("referer") != null) { doc1.addField("referrer", request.getHeader("referer")); } @@ -392,7 +436,8 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, HttpServ } protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String ip, String userAgent, - String xforwardedfor, EPerson currentUser) throws SQLException { + String xforwardedfor, EPerson currentUser, + String referrer) throws SQLException { 
boolean isSpiderBot = SpiderDetector.isSpider(ip); if (isSpiderBot && !configurationService.getBooleanProperty("usage-statistics.logBots", true)) { @@ -413,6 +458,11 @@ protected SolrInputDocument getCommonSolrDoc(DSpaceObject dspaceObject, String i doc1.addField("ip", ip); } + // Add the referrer, if present + if (referrer != null) { + doc1.addField("referrer", referrer); + } + InetAddress ipAddress = null; try { String dns; @@ -1166,22 +1216,6 @@ public String getIgnoreSpiderIPs() { } - @Override - public void optimizeSOLR() { - try { - long start = System.currentTimeMillis(); - System.out.println("SOLR Optimize -- Process Started:" + start); - solr.optimize(); - long finish = System.currentTimeMillis(); - System.out.println("SOLR Optimize -- Process Finished:" + finish); - System.out.println("SOLR Optimize -- Total time taken:" + (finish - start) + " (ms)."); - } catch (SolrServerException sse) { - System.err.println(sse.getMessage()); - } catch (IOException ioe) { - System.err.println(ioe.getMessage()); - } - } - @Override public void shardSolrIndex() throws IOException, SolrServerException { if (!(solr instanceof HttpSolrClient)) { @@ -1654,11 +1688,14 @@ protected synchronized void initSolrYearCores() { statisticYearCores .add(baseSolrUrl.replace("http://", "").replace("https://", "") + statCoreName); } - //Also add the core containing the current year ! 
- statisticYearCores.add(((HttpSolrClient) solr) + var baseCore = ((HttpSolrClient) solr) .getBaseURL() .replace("http://", "") - .replace("https://", "")); + .replace("https://", ""); + if (!statisticYearCores.contains(baseCore)) { + //Also add the core containing the current year, if it hasn't been added already + statisticYearCores.add(baseCore); + } } catch (IOException | SolrServerException e) { log.error(e.getMessage(), e); } diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java index bcb8657ff27d..56a33a8cfb5c 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerUsageEventListener.java @@ -7,9 +7,10 @@ */ package org.dspace.statistics; -import java.util.LinkedList; +import java.util.ArrayList; import java.util.List; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.eperson.EPerson; import org.dspace.services.model.Event; @@ -28,7 +29,7 @@ */ public class SolrLoggerUsageEventListener extends AbstractUsageEventListener { - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrLoggerUsageEventListener.class); + private static final Logger log = LogManager.getLogger(SolrLoggerUsageEventListener.class); protected SolrLoggerService solrLoggerService; @@ -49,14 +50,14 @@ public void receiveEvent(Event event) { if (UsageEvent.Action.VIEW == ue.getAction()) { if (ue.getRequest() != null) { - solrLoggerService.postView(ue.getObject(), ue.getRequest(), currentUser); + solrLoggerService.postView(ue.getObject(), ue.getRequest(), currentUser, ue.getReferrer()); } else { solrLoggerService.postView(ue.getObject(), ue.getIp(), ue.getUserAgent(), ue.getXforwardedfor(), - currentUser); + currentUser, ue.getReferrer()); } } else if (UsageEvent.Action.SEARCH == ue.getAction()) { 
UsageSearchEvent usageSearchEvent = (UsageSearchEvent) ue; - List queries = new LinkedList<>(); + List queries = new ArrayList<>(); queries.add(usageSearchEvent.getQuery()); solrLoggerService.postSearch(usageSearchEvent.getObject(), usageSearchEvent.getRequest(), currentUser, queries, usageSearchEvent.getPage().getSize(), diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrStatisticsCore.java b/dspace-api/src/main/java/org/dspace/statistics/SolrStatisticsCore.java index 345084ef6b22..9ad72cbf313b 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrStatisticsCore.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrStatisticsCore.java @@ -9,9 +9,12 @@ import static org.apache.logging.log4j.LogManager.getLogger; +import javax.inject.Named; + import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.dspace.service.impl.HttpConnectionPoolService; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -20,13 +23,16 @@ */ public class SolrStatisticsCore { - private static Logger log = getLogger(SolrStatisticsCore.class); + private static final Logger log = getLogger(); protected SolrClient solr = null; @Autowired private ConfigurationService configurationService; + @Autowired @Named("solrHttpConnectionPoolService") + private HttpConnectionPoolService httpConnectionPoolService; + /** * Returns the {@link SolrClient} for the Statistics core. * Initializes it if needed. 
@@ -50,7 +56,9 @@ protected void initSolr() { log.info("usage-statistics.dbfile: {}", configurationService.getProperty("usage-statistics.dbfile")); try { - solr = new HttpSolrClient.Builder(solrService).build(); + solr = new HttpSolrClient.Builder(solrService) + .withHttpClient(httpConnectionPoolService.getClient()) + .build(); } catch (Exception e) { log.error("Error accessing Solr server configured in 'solr-statistics.server'", e); } diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java index 1152ee669c4c..a8ffbb4b40b7 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/DatasetTimeGenerator.java @@ -187,7 +187,7 @@ private int getTimeDifference(Date date1, Date date2, int type) { cal2.clear(Calendar.HOUR); cal1.clear(Calendar.HOUR_OF_DAY); cal2.clear(Calendar.HOUR_OF_DAY); - //yet i know calendar just won't clear his hours + //yet i know calendar just won't clear its hours cal1.set(Calendar.HOUR_OF_DAY, 0); cal2.set(Calendar.HOUR_OF_DAY, 0); } diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java index 4197fc74afdf..121e66af4875 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataVisits.java @@ -285,13 +285,8 @@ && getDatasetGenerators().get(1) != null && getDatasetGenerators() DatasetQuery firsDataset = datasetQueries.get(0); //Do the first query - ObjectCount[] topCounts1 = null; -// if (firsDataset.getQueries().size() == 1) { - topCounts1 = + ObjectCount[] topCounts1 = queryFacetField(firsDataset, firsDataset.getQueries().get(0).getQuery(), filterQuery, facetMinCount); -// } else { -// TODO: do this -// 
} // Check if we have more queries that need to be done if (datasetQueries.size() == 2) { DatasetQuery secondDataSet = datasetQueries.get(1); @@ -313,7 +308,6 @@ && getDatasetGenerators().get(1) != null && getDatasetGenerators() } for (int i = 0; i < topCounts1.length; i++) { ObjectCount count1 = topCounts1[i]; - ObjectCount[] currentResult = new ObjectCount[topCounts2.length]; // Make sure we have a dataSet if (dataset == null) { @@ -627,6 +621,10 @@ protected Map getAttributes(String value, } if (dsoId != null && query.dsoType != -1) { + // Store the UUID of the DSO as an attribute. Needed in particular for Bitstream download usage reports, + // as the Bitstream itself won't be available when converting points to their REST representation + attrs.put("id", dsoId); + switch (query.dsoType) { case Constants.BITSTREAM: Bitstream bit = bitstreamService.findByIdOrLegacyId(context, dsoId); @@ -645,7 +643,7 @@ protected Map getAttributes(String value, // be null if a handle has not yet been assigned. In this case reference the // item its internal id. In the last case where the bitstream is not associated // with an item (such as a community logo) then reference the bitstreamID directly. 
- String identifier = null; + String identifier; if (owningItem != null && owningItem.getHandle() != null) { identifier = "handle/" + owningItem.getHandle(); } else if (owningItem != null) { diff --git a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java index c0148ca1d262..b39872a60c72 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java +++ b/dspace-api/src/main/java/org/dspace/statistics/content/StatisticsDataWorkflow.java @@ -13,6 +13,7 @@ import java.text.ParseException; import java.time.LocalDate; import java.time.Period; +import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; @@ -158,8 +159,8 @@ protected String getQuery() { } private long getMonthsDifference(Date date1, Date date2) { - LocalDate earlier = LocalDate.from(date1.toInstant()); - LocalDate later = LocalDate.from(date2.toInstant()); + LocalDate earlier = LocalDate.ofInstant(date1.toInstant(), ZoneOffset.UTC); + LocalDate later = LocalDate.ofInstant(date2.toInstant(), ZoneOffset.UTC); return Period.between(earlier, later).toTotalMonths(); } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java b/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java index 5620dace08ff..807184a4bbef 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/IrusExportUsageEventListener.java @@ -13,7 +13,7 @@ import org.dspace.content.Bitstream; import org.dspace.content.Item; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.services.ConfigurationService; import org.dspace.services.model.Event; import 
org.dspace.statistics.export.processor.BitstreamEventProcessor; @@ -66,7 +66,7 @@ public void receiveEvent(Event event) { } catch (Exception e1) { type = -1; } - log.error(LogManager.getHeader(ue.getContext(), "Error while processing export of use event", + log.error(LogHelper.getHeader(ue.getContext(), "Error while processing export of use event", "Id: " + id + " type: " + type), e); } } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index b5d65aa4e50e..7d1015c8e2ba 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,30 +33,18 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { Options options = new 
Options(); options.addOption("a", true, "Add a new \"failed\" row to the table with a url (test purposes only)"); - options.getOption("a").setType(String.class); options.addOption("r", false, "Retry sending requests to all urls stored in the table with failed requests. " + "This includes the url that can be added through the -a option."); - options.getOption("r").setType(boolean.class); options.addOption("h", "help", false, "print this help message"); - options.getOption("h").setType(boolean.class); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java index 52feb1f4b892..609298779d34 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ExportEventProcessor.java @@ -19,8 +19,6 @@ import org.apache.commons.codec.CharEncoding; import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.dspace.content.DCDate; import org.dspace.content.Entity; import org.dspace.content.EntityType; @@ -42,8 +40,6 @@ */ public abstract class ExportEventProcessor { - private static final Logger log = LogManager.getLogger(); - protected static final String ENTITY_TYPE_DEFAULT = "Publication"; protected static final String ITEM_VIEW = "Investigation"; @@ -130,8 +126,10 @@ protected String getBaseParameters(Item item) //Start adding our data StringBuilder data = new StringBuilder(); - data.append(URLEncoder.encode("url_ver", UTF_8) + "=" + - URLEncoder.encode(configurationService.getProperty("irus.statistics.tracker.urlversion"), UTF_8)); + data.append(URLEncoder.encode("url_ver", UTF_8)) + .append("=") + .append(URLEncoder.encode(configurationService.getProperty("irus.statistics.tracker.urlversion"), + UTF_8)); 
data.append("&").append(URLEncoder.encode("req_id", UTF_8)).append("=") .append(URLEncoder.encode(clientIP, UTF_8)); data.append("&").append(URLEncoder.encode("req_dat", UTF_8)).append("=") diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java index 507ca9238251..92adb67546ef 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/processor/ItemEventProcessor.java @@ -24,9 +24,10 @@ */ public class ItemEventProcessor extends ExportEventProcessor { - private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); - private Item item; + private final Item item; /** * Creates a new ItemEventProcessor that will set the params @@ -48,6 +49,7 @@ public ItemEventProcessor(Context context, HttpServletRequest request, Item item * @throws SQLException * @throws IOException */ + @Override public void processEvent() throws SQLException, IOException { if (shouldProcessItem(item)) { String baseParam = getBaseParameters(item); diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java index d2524d78750a..b7a9562fb541 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/service/OpenUrlServiceImpl.java @@ -9,13 +9,16 @@ import java.io.IOException; import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLConnection; import java.sql.SQLException; import java.util.Date; import java.util.List; import 
org.apache.commons.lang.StringUtils; +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.impl.client.HttpClientBuilder; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.core.Context; @@ -57,21 +60,30 @@ public void processUrl(Context c, String urlStr) throws SQLException { } /** - * Returns the response code from accessing the url + * Returns the response code from accessing the url. Returns a http status 408 when the external service doesn't + * reply in 10 seconds + * * @param urlStr * @return response code from the url * @throws IOException */ protected int getResponseCodeFromUrl(final String urlStr) throws IOException { - URLConnection conn; - URL url = new URL(urlStr); - conn = url.openConnection(); + HttpGet httpGet = new HttpGet(urlStr); + HttpClient httpClient = getHttpClient(getHttpClientRequestConfig()); + HttpResponse httpResponse = httpClient.execute(httpGet); + return httpResponse.getStatusLine().getStatusCode(); + } - HttpURLConnection httpURLConnection = (HttpURLConnection) conn; - int responseCode = httpURLConnection.getResponseCode(); - httpURLConnection.disconnect(); + protected HttpClient getHttpClient(RequestConfig requestConfig) { + return HttpClientBuilder.create() + .setDefaultRequestConfig(requestConfig) + .build(); + } - return responseCode; + protected RequestConfig getHttpClientRequestConfig() { + return RequestConfig.custom() + .setConnectTimeout(10 * 1000) + .build(); } /** diff --git a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java index 081b7719644b..61b2bb6013de 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java +++ 
b/dspace-api/src/main/java/org/dspace/statistics/service/SolrLoggerService.java @@ -56,9 +56,23 @@ public void post(DSpaceObject dspaceObject, HttpServletRequest request, public void postView(DSpaceObject dspaceObject, HttpServletRequest request, EPerson currentUser); + /** + * Store a usage event into Solr. + * + * @param dspaceObject the object used. + * @param request the current request context. + * @param currentUser the current session's user. + * @param referrer the optional referrer. + */ + public void postView(DSpaceObject dspaceObject, HttpServletRequest request, + EPerson currentUser, String referrer); + public void postView(DSpaceObject dspaceObject, String ip, String userAgent, String xforwardedfor, EPerson currentUser); + public void postView(DSpaceObject dspaceObject, + String ip, String userAgent, String xforwardedfor, EPerson currentUser, String referrer); + public void postSearch(DSpaceObject resultObject, HttpServletRequest request, EPerson currentUser, List queries, int rpp, String sortBy, String order, int page, DSpaceObject scope); @@ -252,12 +266,6 @@ public QueryResponse query(String query, String filterQuery, */ public String getIgnoreSpiderIPs(); - /** - * Maintenance to keep a SOLR index efficient. - * Note: This might take a long time. 
- */ - public void optimizeSOLR(); - public void shardSolrIndex() throws IOException, SolrServerException; public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception; diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java b/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java index 139b75e8cf86..cb94dcc1a195 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/IPTable.java @@ -7,11 +7,13 @@ */ package org.dspace.statistics.util; -import java.util.HashMap; +import java.net.InetAddress; +import java.net.UnknownHostException; import java.util.HashSet; -import java.util.Map; +import java.util.Iterator; import java.util.Set; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -25,8 +27,40 @@ public class IPTable { private static final Logger log = LogManager.getLogger(IPTable.class); /* A lookup tree for IP addresses and SubnetRanges */ - private final Map>>> map - = new HashMap<>(); + private final Set ipRanges = new HashSet<>(); + + /** + * Internal class representing an IP range + */ + static class IPRange { + + /* Lowest address in the range */ + private final long ipLo; + + /* Highest address in the range */ + private final long ipHi; + + IPRange(long ipLo, long ipHi) { + this.ipLo = ipLo; + this.ipHi = ipHi; + } + + /** + * Get the lowest address in the range + * @return the lowest address as a long integer + */ + public long getIpLo() { + return ipLo; + } + + /** + * Get the highest address in the range + * @return the highest address as a long integer + */ + public long getIpHi() { + return ipHi; + } + } /** * Can be full v4 IP, subnet or range string. 
@@ -45,79 +79,90 @@ public class IPTable { */ public void add(String ip) throws IPFormatException { - String[] start; + String start; - String[] end; + String end; String[] range = ip.split("-"); - if (range.length >= 2) { + if (range.length == 2) { - start = range[0].trim().split("/")[0].split("\\."); - end = range[1].trim().split("/")[0].split("\\."); - - if (start.length != 4 || end.length != 4) { - throw new IPFormatException(ip + " - Ranges need to be full IPv4 Addresses"); - } + start = range[0].trim(); + end = range[1].trim(); - if (!(start[0].equals(end[0]) && start[1].equals(end[1]) && start[2].equals(end[2]))) { - throw new IPFormatException(ip + " - Ranges can only be across the last subnet x.y.z.0 - x.y.z.254"); + try { + long ipLo = ipToLong(InetAddress.getByName(start)); + long ipHi = ipToLong(InetAddress.getByName(end)); + ipRanges.add(new IPRange(ipLo, ipHi)); + return; + } catch (UnknownHostException e) { + throw new IPFormatException(ip + " - Range format should be similar to 1.2.3.0-1.2.3.255"); } } else { - //need to ignore CIDR notation for the moment. 
- //ip = ip.split("\\/")[0]; - - String[] subnets = ip.split("\\."); - - if (subnets.length < 3) { - throw new IPFormatException(ip + " - require at least three subnet places (255.255.255.0"); + // Convert implicit ranges to netmask format + // 192 -> 192.0.0.0/8 + // 192.168 -> 192.168.0.0/16 + // 192.168.1 -> 192.168.1.0/24 + int periods = StringUtils.countMatches(ip, '.'); + if (periods < 3) { + ip = StringUtils.join(ip, StringUtils.repeat(".0", 4 - periods - 1), "/", (periods + 1) * 8); } - start = subnets; - end = subnets; - } - - if (start.length >= 3) { - Map>> first = map.get(start[0]); - - if (first == null) { - first = new HashMap<>(); - map.put(start[0], first); - } - - Map> second = first.get(start[1]); - - if (second == null) { - second = new HashMap<>(); - first.put(start[1], second); - } - - Set third = second.get(start[2]); - - if (third == null) { - third = new HashSet<>(); - second.put(start[2], third); - } - - //now populate fourth place (* or value 0-254); - - if (start.length == 3) { - third.add("*"); + if (ip.contains("/")) { + String[] parts = ip.split("/"); + try { + long ipLong = ipToLong(InetAddress.getByName(parts[0])); + long mask = (long) Math.pow(2, 32 - Integer.parseInt(parts[1])); + long ipLo = (ipLong / mask) * mask; + long ipHi = (( (ipLong / mask) + 1) * mask) - 1; + ipRanges.add(new IPRange(ipLo, ipHi)); + return; + } catch (Exception e) { + throw new IPFormatException(ip + " - Range format should be similar to 172.16.0.0/12"); + } + } else { + try { + long ipLo = ipToLong(InetAddress.getByName(ip)); + ipRanges.add(new IPRange(ipLo, ipLo)); + return; + } catch (UnknownHostException e) { + throw new IPFormatException(ip + " - IP address format should be similar to 1.2.3.14"); + } } + } + } - if (third.contains("*")) { - return; - } + /** + * Convert an IP address to a long integer + * @param ip the IP address + * @return + */ + public static long ipToLong(InetAddress ip) { + byte[] octets = ip.getAddress(); + long result = 0; + 
for (byte octet : octets) { + result <<= 8; + result |= octet & 0xff; + } + return result; + } - if (start.length >= 4) { - int s = Integer.valueOf(start[3]); - int e = Integer.valueOf(end[3]); - for (int i = s; i <= e; i++) { - third.add(String.valueOf(i)); - } - } + /** + * Convert a long integer into an IP address string + * @param ip the IP address as a long integer + * @return + */ + public static String longToIp(long ip) { + long part = ip; + String[] parts = new String[4]; + for (int i = 0; i < 4; i++) { + long octet = part & 0xff; + parts[3 - i] = String.valueOf(octet); + part = part / 256; } + + return parts[0] + "." + parts[1] + "." + parts[2] + "." + parts[3]; } /** @@ -125,75 +170,35 @@ public void add(String ip) throws IPFormatException { * * @param ip the address to be tested * @return true if {@code ip} is within this table's limits. Returns false - * if {@link ip} looks like an IPv6 address. + * if {@code ip} looks like an IPv6 address. * @throws IPFormatException Exception Class to deal with IPFormat errors. */ public boolean contains(String ip) throws IPFormatException { - String[] subnets = ip.split("\\."); - - // Does it look like IPv6? - if (subnets.length > 4 || ip.contains("::")) { - log.warn("Address {} assumed not to match. IPv6 is not implemented.", ip); - return false; - } - - // Does it look like a subnet? 
- if (subnets.length < 4) { - throw new IPFormatException("needs to be a single IP address"); - } - - Map>> first = map.get(subnets[0]); - - if (first == null) { - return false; - } - - Map> second = first.get(subnets[1]); - - if (second == null) { - return false; + try { + long ipToTest = ipToLong(InetAddress.getByName(ip)); + return ipRanges.stream() + .anyMatch(ipRange -> (ipToTest >= ipRange.getIpLo() && ipToTest <= ipRange.getIpHi())); + } catch (UnknownHostException e) { + throw new IPFormatException("ip not valid"); } - - Set third = second.get(subnets[2]); - - if (third == null) { - return false; - } - - return third.contains(subnets[3]) || third.contains("*"); - } /** - * Convert to a Set. + * Convert to a Set. This set contains all IPs in the range * * @return this table's content as a Set */ public Set toSet() { HashSet set = new HashSet<>(); - for (Map.Entry>>> first : map.entrySet()) { - String firstString = first.getKey(); - Map>> secondMap = first.getValue(); - - for (Map.Entry>> second : secondMap.entrySet()) { - String secondString = second.getKey(); - Map> thirdMap = second.getValue(); - - for (Map.Entry> third : thirdMap.entrySet()) { - String thirdString = third.getKey(); - Set fourthSet = third.getValue(); - - if (fourthSet.contains("*")) { - set.add(firstString + "." + secondString + "." + thirdString); - } else { - for (String fourth : fourthSet) { - set.add(firstString + "." + secondString + "." + thirdString + "." 
+ fourth); - } - } - - } + Iterator ipRangeIterator = ipRanges.iterator(); + while (ipRangeIterator.hasNext()) { + IPRange ipRange = ipRangeIterator.next(); + long ipLo = ipRange.getIpLo(); + long ipHi = ipRange.getIpHi(); + for (long ip = ipLo; ip <= ipHi; ip++) { + set.add(longToIp(ip)); } } @@ -205,7 +210,7 @@ public Set toSet() { * @return true if empty, false otherwise */ public boolean isEmpty() { - return map.isEmpty(); + return ipRanges.isEmpty(); } /** @@ -217,5 +222,23 @@ public IPFormatException(String s) { } } - + /** + * Represent this IP table as a string + * @return a string containing all IP ranges in this IP table + */ + @Override + public String toString() { + StringBuilder stringBuilder = new StringBuilder(); + Iterator ipRangeIterator = ipRanges.iterator(); + while (ipRangeIterator.hasNext()) { + IPRange ipRange = ipRangeIterator.next(); + stringBuilder.append(longToIp(ipRange.getIpLo())) + .append("-") + .append(longToIp(ipRange.getIpHi())); + if (ipRangeIterator.hasNext()) { + stringBuilder.append(", "); + } + } + return stringBuilder.toString(); + } } diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java index b1b31c0fe146..319fe437d648 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsClient.java @@ -16,6 +16,7 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.logging.log4j.Logger; +import org.apache.tools.ant.Project; import org.apache.tools.ant.taskdefs.Get; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.statistics.factory.StatisticsServiceFactory; @@ -66,7 +67,6 @@ public static void main(String[] args) throws Exception { options.addOption("m", "mark-spiders", false, "Update isBot Flag in Solr"); options.addOption("f", "delete-spiders-by-flag", 
false, "Delete Spiders in Solr By isBot Flag"); options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address"); - options.addOption("o", "optimize", false, "Run maintenance on the SOLR index"); options.addOption("b", "reindex-bitstreams", false, "Reindex the bitstreams to ensure we have the bundle name"); options.addOption("e", "export", false, "Export SOLR view statistics data to usage-statistics-intermediate-format"); @@ -92,8 +92,6 @@ public static void main(String[] args) throws Exception { solrLoggerService.deleteRobotsByIsBotFlag(); } else if (line.hasOption('i')) { solrLoggerService.deleteRobotsByIP(); - } else if (line.hasOption('o')) { - solrLoggerService.optimizeSOLR(); } else if (line.hasOption('b')) { solrLoggerService.reindexBitstreamHits(line.hasOption('r')); } else if (line.hasOption('e')) { @@ -136,6 +134,7 @@ private static void updateSpiderFiles() { URL url = new URL(value); Get get = new Get(); + get.setProject(new Project()); get.setDest(new File(spiders, url.getHost() + url.getPath().replace("/", "-"))); get.setSrc(url); get.setUseTimestamp(true); diff --git a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java index bd8662854f19..95736a8bd6d9 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java +++ b/dspace-api/src/main/java/org/dspace/statistics/util/StatisticsImporter.java @@ -348,9 +348,9 @@ protected void load(String filename, Context context, boolean verbose) { // Get the eperson details EPerson eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, user); - int epersonId = 0; + UUID epersonId = null; if (eperson != null) { - eperson.getID(); + epersonId = eperson.getID(); } // Save it in our server @@ -365,12 +365,10 @@ protected void load(String filename, Context context, boolean verbose) { sid.addField("city", city); 
sid.addField("latitude", latitude); sid.addField("longitude", longitude); - if (epersonId > 0) { + if (epersonId != null) { sid.addField("epersonid", epersonId); } - if (dns != null) { - sid.addField("dns", dns.toLowerCase()); - } + sid.addField("dns", dns.toLowerCase()); solrLoggerService.storeParents(sid, dso); solr.add(sid); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java new file mode 100644 index 000000000000..5b367d7a8136 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BaseBitStoreService.java @@ -0,0 +1,217 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.security.DigestInputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Utils; + +/** + * BaseBitStoreService base implementation to store + * and organize assets in digits. 
+ * + */ +public abstract class BaseBitStoreService implements BitStoreService { + + protected static Logger log = LogManager.getLogger(DSBitStoreService.class); + // Checksum algorithm + protected static final String CSA = "MD5"; + protected static final String MODIFIED = "modified"; + protected static final String CHECKSUM_ALGORITHM = "checksum_algorithm"; + protected static final String CHECKSUM = "checksum"; + protected static final String SIZE_BYTES = "size_bytes"; + + protected boolean initialized = false; + + // These settings control the way an identifier is hashed into + // directory and file names + // + // With digitsPerLevel 2 and directoryLevels 3, an identifier + // like 12345678901234567890 turns into the relative name + // /12/34/56/12345678901234567890. + // + // You should not change these settings if you have data in the + // asset store, as the BitstreamStorageManager will be unable + // to find your existing data. + protected static final int digitsPerLevel = 2; + protected static final int directoryLevels = 3; + + /** + * Return the intermediate path derived from the internal_id. This method splits + * the id into groups which become subdirectories. + * + * @param internalId The internal_id + * @return The path based on the id without leading or trailing separators + */ + protected String getIntermediatePath(String internalId) { + StringBuilder path = new StringBuilder(); + if (StringUtils.isEmpty(internalId) || internalId.length() <= digitsPerLevel) { + return path.append(internalId).append(File.separator).toString(); + } + populatePathSplittingId(internalId, path); + appendSeparator(path); + return path.toString(); + } + + /** + * Sanity Check: If the internal ID contains a pathname separator, it's probably + * an attempt to make a path traversal attack, so ignore the path prefix. The + * internal-ID is supposed to be just a filename, so this will not affect normal + * operation. 
+ * + * @param sInternalId + * @return Sanitized id + */ + protected String sanitizeIdentifier(String sInternalId) { + if (sInternalId.contains(File.separator)) { + sInternalId = sInternalId.substring(sInternalId.lastIndexOf(File.separator) + 1); + } + return sInternalId; + } + + /** + * Append separator to target {@code StringBuilder} + * + * @param path + */ + protected void appendSeparator(StringBuilder path) { + if (!endsWithSeparator(path)) { + path.append(File.separator); + } + } + + /** + * Utility that checks string ending with separator + * + * @param bufFilename + * @return + */ + protected boolean endsWithSeparator(StringBuilder bufFilename) { + return bufFilename.lastIndexOf(File.separator) == bufFilename.length() - 1; + } + + /** + * Splits internalId into several subpaths using {@code digitsPerLevel} that + * indicates the folder name length, and {@code direcoryLevels} that indicates + * the maximum number of subfolders. + * + * @param internalId bitStream identifier + * @param path + */ + protected void populatePathSplittingId(String internalId, StringBuilder path) { + int digits = 0; + path.append(extractSubstringFrom(internalId, digits, digits + digitsPerLevel)); + for (int i = 1; i < directoryLevels && !isLonger(internalId, digits + digitsPerLevel); i++) { + digits = i * digitsPerLevel; + path.append(File.separator); + path.append(extractSubstringFrom(internalId, digits, digits + digitsPerLevel)); + } + } + + /** + * Extract substring if is in range, otherwise will truncate to length + * + * @param internalId + * @param startIndex + * @param endIndex + * @return + */ + protected String extractSubstringFrom(String internalId, int startIndex, int endIndex) { + if (isLonger(internalId, endIndex)) { + endIndex = internalId.length(); + } + return internalId.substring(startIndex, endIndex); + } + + /** + * Checks if the {@code String} is longer than {@code endIndex} + * + * @param internalId + * @param endIndex + * @return + */ + protected boolean 
isLonger(String internalId, int endIndex) { + return endIndex > internalId.length(); + } + + /** + * Retrieves a map of useful metadata about the File (size, checksum, modified) + * + * @param file The File to analyze + * @param attrs The list of requested metadata values + * @return Map of updated metadatas / attrs + * @throws IOException + */ + public Map about(File file, List attrs) throws IOException { + + Map metadata = new HashMap(); + + try { + if (file != null && file.exists()) { + this.putValueIfExistsKey(attrs, metadata, SIZE_BYTES, file.length()); + if (attrs.contains(CHECKSUM)) { + metadata.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file))); + metadata.put(CHECKSUM_ALGORITHM, CSA); + } + this.putValueIfExistsKey(attrs, metadata, MODIFIED, String.valueOf(file.lastModified())); + } + return metadata; + } catch (Exception e) { + log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e); + throw new IOException(e); + } + } + + @Override + public boolean isInitialized() { + return this.initialized; + } + + private byte[] generateChecksumFrom(File file) throws FileNotFoundException, IOException { + // generate checksum by reading the bytes + try (FileInputStream fis = new FileInputStream(file)) { + return generateChecksumFrom(fis); + } catch (NoSuchAlgorithmException e) { + log.warn("Caught NoSuchAlgorithmException", e); + throw new IOException("Invalid checksum algorithm"); + } + } + + private byte[] generateChecksumFrom(FileInputStream fis) throws IOException, NoSuchAlgorithmException { + try (DigestInputStream dis = new DigestInputStream(fis, MessageDigest.getInstance(CSA))) { + final int BUFFER_SIZE = 1024 * 4; + final byte[] buffer = new byte[BUFFER_SIZE]; + while (true) { + final int count = dis.read(buffer, 0, BUFFER_SIZE); + if (count == -1) { + break; + } + } + return dis.getMessageDigest().digest(); + } + } + + protected void putValueIfExistsKey(List attrs, Map metadata, String key, Object value) { + if 
(attrs.contains(key)) { + metadata.put(key, value); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreMigrate.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreMigrate.java index f3a2712a6104..4d9ed7508053 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreMigrate.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreMigrate.java @@ -15,8 +15,6 @@ import org.apache.commons.cli.ParseException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.BitstreamService; import org.dspace.core.Context; import org.dspace.storage.bitstore.factory.StorageServiceFactory; import org.dspace.storage.bitstore.service.BitstreamStorageService; @@ -31,8 +29,6 @@ public class BitStoreMigrate { */ private static final Logger log = LogManager.getLogger(BitStoreMigrate.class); - private static final BitstreamService bitstreamService - = ContentServiceFactory.getInstance().getBitstreamService(); private static final BitstreamStorageService bitstreamStorageService = StorageServiceFactory.getInstance().getBitstreamStorageService(); diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java index b33867f0e2ec..5a02ad1d5617 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitStoreService.java @@ -9,6 +9,7 @@ import java.io.IOException; import java.io.InputStream; +import java.util.List; import java.util.Map; import org.dspace.content.Bitstream; @@ -62,13 +63,13 @@ public interface BitStoreService { * Obtain technical metadata about an asset in the asset store. 
* * @param bitstream The bitstream to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException; + public Map about(Bitstream bitstream, List attrs) throws IOException; /** * Remove an asset from the asset store. @@ -77,4 +78,20 @@ public interface BitStoreService { * @throws java.io.IOException If a problem occurs while removing the asset */ public void remove(Bitstream bitstream) throws IOException; + + /** + * Determines if a store has been initialized + * + * @return {@code boolean} true if initialized, false otherwise + */ + public boolean isInitialized(); + + /** + * Determines if a store is enabled, by default is enabled + * + * @return {@code boolean} true if enabled, false otherwise + */ + public default boolean isEnabled() { + return true; + } } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java index de1f671ca5ad..956ac5a7f8f1 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/BitstreamStorageServiceImpl.java @@ -17,7 +17,9 @@ import java.util.UUID; import javax.annotation.Nullable; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections4.MapUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.checker.service.ChecksumHistoryService; @@ -57,13 +59,12 @@ * be notified of BitstreamStorageManager actions.

    * * @author Peter Breton, Robert Tansley, David Little, Nathan Sarr - * @version $Revision$ */ public class BitstreamStorageServiceImpl implements BitstreamStorageService, InitializingBean { /** * log4j log */ - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamStorageServiceImpl.class); + private static final Logger log = LogManager.getLogger(); @Autowired(required = true) protected BitstreamService bitstreamService; @@ -73,7 +74,7 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini /** * asset stores */ - private Map stores = new HashMap(); + private Map stores = new HashMap<>(); /** * The index of the asset store to use for new bitstreams @@ -92,7 +93,9 @@ protected BitstreamStorageServiceImpl() { @Override public void afterPropertiesSet() throws Exception { for (Map.Entry storeEntry : stores.entrySet()) { - storeEntry.getValue().init(); + if (storeEntry.getValue().isEnabled() && !storeEntry.getValue().isInitialized()) { + storeEntry.getValue().init(); + } } } @@ -100,19 +103,18 @@ public void afterPropertiesSet() throws Exception { public UUID store(Context context, Bitstream bitstream, InputStream is) throws SQLException, IOException { // Create internal ID String id = Utils.generateKey(); - - bitstream.setDeleted(true); - bitstream.setInternalId(id); - /* * Set the store number of the new bitstream If you want to use some * other method of working out where to put a new bitstream, here's * where it should go */ bitstream.setStoreNumber(incoming); + bitstream.setDeleted(true); + bitstream.setInternalId(id); + BitStoreService store = this.getStore(incoming); //For efficiencies sake, PUT is responsible for setting bitstream size_bytes, checksum, and checksum_algorithm - stores.get(incoming).put(bitstream, is); + store.put(bitstream, is); //bitstream.setSizeBytes(file.length()); //bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); //bitstream.setChecksumAlgorithm("MD5"); @@ 
-164,12 +166,9 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, bitstream.setStoreNumber(assetstore); bitstreamService.update(context, bitstream); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); + List wantedMetadata = List.of("size_bytes", "checksum", "checksum_algorithm"); + Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata); - Map receivedMetadata = stores.get(assetstore).about(bitstream, wantedMetadata); if (MapUtils.isEmpty(receivedMetadata)) { String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath; log.error(message); @@ -199,13 +198,8 @@ public UUID register(Context context, Bitstream bitstream, int assetstore, } @Override - public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { - Map wantedMetadata = new HashMap(); - wantedMetadata.put("checksum", null); - wantedMetadata.put("checksum_algorithm", null); - - Map receivedMetadata = stores.get(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); - return receivedMetadata; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException { + return this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("checksum", "checksum_algorithm")); } @Override @@ -217,32 +211,67 @@ public boolean isRegisteredBitstream(String internalId) { public InputStream retrieve(Context context, Bitstream bitstream) throws SQLException, IOException { Integer storeNumber = bitstream.getStoreNumber(); - return stores.get(storeNumber).get(bitstream); + return this.getStore(storeNumber).get(bitstream); } @Override public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLException, IOException, AuthorizeException { - Context context = null; - int commitCounter = 0; + Context context = new Context(Context.Mode.BATCH_EDIT); + + int 
offset = 0; + int limit = 100; + + int cleanedBitstreamCount = 0; + + int deletedBitstreamCount = bitstreamService.countDeletedBitstreams(context); + System.out.println("Found " + deletedBitstreamCount + " deleted bistream to cleanup"); try { - context = new Context(Context.Mode.BATCH_EDIT); context.turnOffAuthorisationSystem(); - List storage = bitstreamService.findDeletedBitstreams(context); - for (Bitstream bitstream : storage) { - UUID bid = bitstream.getID(); - Map wantedMetadata = new HashMap(); - wantedMetadata.put("size_bytes", null); - wantedMetadata.put("modified", null); - Map receivedMetadata = stores.get(bitstream.getStoreNumber()).about(bitstream, wantedMetadata); + while (cleanedBitstreamCount < deletedBitstreamCount) { + + List storage = bitstreamService.findDeletedBitstreams(context, limit, offset); + if (CollectionUtils.isEmpty(storage)) { + break; + } + + for (Bitstream bitstream : storage) { + UUID bid = bitstream.getID(); + List wantedMetadata = List.of("size_bytes", "modified"); + Map receivedMetadata = this.getStore(bitstream.getStoreNumber()) + .about(bitstream, wantedMetadata); + + + // Make sure entries which do not exist are removed + if (MapUtils.isEmpty(receivedMetadata)) { + log.debug("bitstore.about is empty, so file is not present"); + if (deleteDbRecords) { + log.debug("deleting record"); + if (verbose) { + System.out.println(" - Deleting bitstream information (ID: " + bid + ")"); + } + checksumHistoryService.deleteByBitstream(context, bitstream); + if (verbose) { + System.out.println(" - Deleting bitstream record from database (ID: " + bid + ")"); + } + bitstreamService.expunge(context, bitstream); + } + context.uncacheEntity(bitstream); + continue; + } + + // This is a small chance that this is a file which is + // being stored -- get it next time. 
+ if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString()))) { + log.debug("file is recent"); + context.uncacheEntity(bitstream); + continue; + } - // Make sure entries which do not exist are removed - if (MapUtils.isEmpty(receivedMetadata)) { - log.debug("bitstore.about is empty, so file is not present"); if (deleteDbRecords) { - log.debug("deleting record"); + log.debug("deleting db record"); if (verbose) { System.out.println(" - Deleting bitstream information (ID: " + bid + ")"); } @@ -252,60 +281,42 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio } bitstreamService.expunge(context, bitstream); } - context.uncacheEntity(bitstream); - continue; - } - - // This is a small chance that this is a file which is - // being stored -- get it next time. - if (isRecent(Long.valueOf(receivedMetadata.get("modified").toString()))) { - log.debug("file is recent"); - context.uncacheEntity(bitstream); - continue; - } - if (deleteDbRecords) { - log.debug("deleting db record"); - if (verbose) { - System.out.println(" - Deleting bitstream information (ID: " + bid + ")"); + if (isRegisteredBitstream(bitstream.getInternalId())) { + context.uncacheEntity(bitstream); + continue; // do not delete registered bitstreams } - checksumHistoryService.deleteByBitstream(context, bitstream); - if (verbose) { - System.out.println(" - Deleting bitstream record from database (ID: " + bid + ")"); + + + // Since versioning allows for multiple bitstreams, check if the internal + // identifier isn't used on + // another place + if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) { + this.getStore(bitstream.getStoreNumber()).remove(bitstream); + + String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId()); + if (log.isDebugEnabled()) { + log.debug(message); + } + if (verbose) { + System.out.println(message); + } } - bitstreamService.expunge(context, bitstream); - } - if 
(isRegisteredBitstream(bitstream.getInternalId())) { context.uncacheEntity(bitstream); - continue; // do not delete registered bitstreams } + // Commit actual changes to DB after dispatch events + System.out.print("Performing incremental commit to the database..."); + context.commit(); + System.out.println(" Incremental commit done!"); - // Since versioning allows for multiple bitstreams, check if the internal identifier isn't used on - // another place - if (bitstreamService.findDuplicateInternalIdentifier(context, bitstream).isEmpty()) { - stores.get(bitstream.getStoreNumber()).remove(bitstream); + cleanedBitstreamCount = cleanedBitstreamCount + storage.size(); - String message = ("Deleted bitstreamID " + bid + ", internalID " + bitstream.getInternalId()); - if (log.isDebugEnabled()) { - log.debug(message); - } - if (verbose) { - System.out.println(message); - } + if (!deleteDbRecords) { + offset = offset + limit; } - // Make sure to commit our outstanding work every 100 - // iterations. Otherwise you risk losing the entire transaction - // if we hit an exception, which isn't useful at all for large - // amounts of bitstreams. 
- commitCounter++; - if (commitCounter % 100 == 0) { - context.dispatchEvents(); - } - - context.uncacheEntity(bitstream); } System.out.print("Committing changes to the database..."); @@ -321,22 +332,18 @@ public void cleanup(boolean deleteDbRecords, boolean verbose) throws SQLExceptio context.abort(); throw sqle; } finally { - if (context != null) { - context.restoreAuthSystemState(); - } + context.restoreAuthSystemState(); } } @Nullable @Override public Long getLastModified(Bitstream bitstream) throws IOException { - Map attrs = new HashMap(); - attrs.put("modified", null); - attrs = stores.get(bitstream.getStoreNumber()).about(bitstream, attrs); - if (attrs == null || !attrs.containsKey("modified")) { + Map metadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("modified")); + if (metadata == null || !metadata.containsKey("modified")) { return null; } - return Long.valueOf(attrs.get("modified").toString()); + return Long.valueOf(metadata.get("modified").toString()); } /** @@ -350,20 +357,30 @@ public Long getLastModified(Bitstream bitstream) throws IOException { */ @Override public Bitstream clone(Context context, Bitstream bitstream) throws SQLException, IOException, AuthorizeException { - Bitstream clonedBitstream = bitstreamService.clone(context, bitstream); - clonedBitstream.setStoreNumber(bitstream.getStoreNumber()); + Bitstream clonedBitstream = null; + try { + // Update our bitstream but turn off the authorization system since permissions + // haven't been set at this point in time. 
+ context.turnOffAuthorisationSystem(); + clonedBitstream = bitstreamService.clone(context, bitstream); + clonedBitstream.setStoreNumber(bitstream.getStoreNumber()); - List metadataValues = bitstreamService - .getMetadata(bitstream, Item.ANY, Item.ANY, Item.ANY, Item.ANY); + List metadataValues = bitstreamService.getMetadata(bitstream, Item.ANY, Item.ANY, Item.ANY, + Item.ANY); - for (MetadataValue metadataValue : metadataValues) { - bitstreamService.addMetadata(context, clonedBitstream, metadataValue.getMetadataField(), - metadataValue.getLanguage(), metadataValue.getValue(), metadataValue.getAuthority(), - metadataValue.getConfidence()); + for (MetadataValue metadataValue : metadataValues) { + bitstreamService.addMetadata(context, clonedBitstream, metadataValue.getMetadataField(), + metadataValue.getLanguage(), metadataValue.getValue(), metadataValue.getAuthority(), + metadataValue.getConfidence()); + } + bitstreamService.update(context, clonedBitstream); + } catch (AuthorizeException e) { + log.error(e); + // Can never happen since we turn off authorization before we update + } finally { + context.restoreAuthSystemState(); } - bitstreamService.update(context, clonedBitstream); return clonedBitstream; - } /** @@ -376,11 +393,12 @@ public Bitstream clone(Context context, Bitstream bitstream) throws SQLException * @throws AuthorizeException Exception indicating the current user of the context does not have permission * to perform a particular action. 
*/ + @Override public void migrate(Context context, Integer assetstoreSource, Integer assetstoreDestination, boolean deleteOld, Integer batchCommitSize) throws IOException, SQLException, AuthorizeException { //Find all the bitstreams on the old source, copy it to new destination, update store_number, save, remove old Iterator allBitstreamsInSource = bitstreamService.findByStoreNumber(context, assetstoreSource); - Integer processedCounter = 0; + int processedCounter = 0; while (allBitstreamsInSource.hasNext()) { Bitstream bitstream = allBitstreamsInSource.next(); @@ -390,13 +408,13 @@ public void migrate(Context context, Integer assetstoreSource, Integer assetstor .getName() + ", SizeBytes:" + bitstream.getSizeBytes()); InputStream inputStream = retrieve(context, bitstream); - stores.get(assetstoreDestination).put(bitstream, inputStream); + this.getStore(assetstoreDestination).put(bitstream, inputStream); bitstream.setStoreNumber(assetstoreDestination); bitstreamService.update(context, bitstream); if (deleteOld) { log.info("Removing bitstream:" + bitstream.getID() + " from assetstore[" + assetstoreSource + "]"); - stores.get(assetstoreSource).remove(bitstream); + this.getStore(assetstoreSource).remove(bitstream); } processedCounter++; @@ -414,14 +432,18 @@ public void migrate(Context context, Integer assetstoreSource, Integer assetstor "] completed. 
" + processedCounter + " objects were transferred."); } + @Override public void printStores(Context context) { try { for (Integer storeNumber : stores.keySet()) { long countBitstreams = bitstreamService.countByStoreNumber(context, storeNumber); - System.out.println("store[" + storeNumber + "] == " + stores.get(storeNumber).getClass() - .getSimpleName() + ", which has " + - countBitstreams + " bitstreams."); + BitStoreService store = this.stores.get(storeNumber); + System.out.println( + "store[" + storeNumber + "] == " + store.getClass().getSimpleName() + + ", which has initialized-status: " + store.isInitialized() + + ", and has: " + countBitstreams + " bitstreams." + ); } System.out.println("Incoming assetstore is store[" + incoming + "]"); } catch (SQLException e) { @@ -465,4 +487,13 @@ protected boolean isRecent(Long lastModified) { // Less than one hour old return (now - lastModified) < (1 * 60 * 1000); } + + protected BitStoreService getStore(int position) throws IOException { + BitStoreService bitStoreService = this.stores.get(position); + if (!bitStoreService.isInitialized()) { + bitStoreService.init(); + } + return bitStoreService; + } + } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java index 36f75c67f9eb..6fef7365e482 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DSBitStoreService.java @@ -15,6 +15,7 @@ import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.util.List; import java.util.Map; import org.apache.logging.log4j.Logger; @@ -29,33 +30,17 @@ * @author Peter Breton, Robert Tansley, Richard Rodgers, Peter Dietz */ -public class DSBitStoreService implements BitStoreService { +public class DSBitStoreService extends BaseBitStoreService { /** * log4j log */ 
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSBitStoreService.class); - // These settings control the way an identifier is hashed into - // directory and file names - // - // With digitsPerLevel 2 and directoryLevels 3, an identifier - // like 12345678901234567890 turns into the relative name - // /12/34/56/12345678901234567890. - // - // You should not change these settings if you have data in the - // asset store, as the BitstreamStorageManager will be unable - // to find your existing data. - private static final int digitsPerLevel = 2; - - private static final int directoryLevels = 3; - - // Checksum algorithm - private static final String CSA = "MD5"; - /** * the asset directory */ private File baseDir; + protected final String REGISTERED_FLAG = "-R"; public DSBitStoreService() { } @@ -66,6 +51,7 @@ public DSBitStoreService() { public void init() { // the config string contains just the asset store directory path //set baseDir? + this.initialized = true; } /** @@ -141,46 +127,18 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { /** * Obtain technical metadata about an asset in the asset store. 
* - * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields - * @return attrs - * A Map with key/value pairs of desired metadata - * @throws java.io.IOException If a problem occurs while obtaining metadata + * @param bitstream The asset to describe + * @param attrs A List of desired metadata fields + * @return attrs A Map with key/value pairs of desired metadata + * @throws java.io.IOException If a problem occurs while obtaining + * metadata */ - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { try { // potentially expensive, since it may calculate the checksum File file = getFile(bitstream); if (file != null && file.exists()) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", file.length()); - } - if (attrs.containsKey("checksum")) { - // generate checksum by reading the bytes - DigestInputStream dis = null; - try { - FileInputStream fis = new FileInputStream(file); - dis = new DigestInputStream(fis, MessageDigest.getInstance(CSA)); - } catch (NoSuchAlgorithmException e) { - log.warn("Caught NoSuchAlgorithmException", e); - throw new IOException("Invalid checksum algorithm"); - } - final int BUFFER_SIZE = 1024 * 4; - final byte[] buffer = new byte[BUFFER_SIZE]; - while (true) { - final int count = dis.read(buffer, 0, BUFFER_SIZE); - if (count == -1) { - break; - } - } - attrs.put("checksum", Utils.toHex(dis.getMessageDigest().digest())); - attrs.put("checksum_algorithm", CSA); - dis.close(); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(file.lastModified())); - } - return attrs; + return super.about(file, attrs); } return null; } catch (Exception e) { @@ -278,10 +236,7 @@ protected File getFile(Bitstream bitstream) throws IOException { // make a path traversal attack, so ignore the path // prefix. 
The internal-ID is supposed to be just a // filename, so this will not affect normal operation. - if (sInternalId.contains(File.separator)) { - sInternalId = sInternalId.substring(sInternalId.lastIndexOf(File.separator) + 1); - } - + sInternalId = this.sanitizeIdentifier(sInternalId); sIntermediatePath = getIntermediatePath(sInternalId); } @@ -297,29 +252,6 @@ protected File getFile(Bitstream bitstream) throws IOException { return new File(bufFilename.toString()); } - /** - * Return the intermediate path derived from the internal_id. This method - * splits the id into groups which become subdirectories. - * - * @param iInternalId The internal_id - * @return The path based on the id without leading or trailing separators - */ - protected String getIntermediatePath(String iInternalId) { - StringBuilder buf = new StringBuilder(); - for (int i = 0; i < directoryLevels; i++) { - int digits = i * digitsPerLevel; - if (i > 0) { - buf.append(File.separator); - } - buf.append(iInternalId.substring(digits, digits - + digitsPerLevel)); - } - buf.append(File.separator); - return buf.toString(); - } - - protected final String REGISTERED_FLAG = "-R"; - public boolean isRegisteredBitstream(String internalId) { return internalId.startsWith(REGISTERED_FLAG); } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java new file mode 100644 index 000000000000..62c24544eeac --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/DeleteOnCloseFileInputStream.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import 
java.io.IOException; + +/** + * When inputstream closes, then delete the file + * http://stackoverflow.com/a/4694155/368581 + */ +public class DeleteOnCloseFileInputStream extends FileInputStream { + + private File file; + + public DeleteOnCloseFileInputStream(String fileName) throws FileNotFoundException { + this(new File(fileName)); + } + + public DeleteOnCloseFileInputStream(File file) throws FileNotFoundException { + super(file); + this.file = file; + } + + public void close() throws IOException { + try { + super.close(); + } finally { + if (file != null) { + file.delete(); + file = null; + } + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java index ce2b3b3f05a9..7a09dd2e76df 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/S3BitStoreService.java @@ -7,25 +7,44 @@ */ package org.dspace.storage.bitstore; +import static java.lang.String.valueOf; + import java.io.File; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.security.DigestInputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.function.Supplier; +import javax.validation.constraints.NotNull; import com.amazonaws.AmazonClientException; import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.regions.Region; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.AmazonS3Exception; import 
com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.PutObjectResult; -import com.amazonaws.services.s3.model.S3Object; -import org.apache.commons.io.FileUtils; +import com.amazonaws.services.s3.transfer.Download; +import com.amazonaws.services.s3.transfer.TransferManager; +import com.amazonaws.services.s3.transfer.TransferManagerBuilder; +import com.amazonaws.services.s3.transfer.Upload; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; @@ -34,6 +53,9 @@ import org.dspace.core.Utils; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.storage.bitstore.factory.StorageServiceFactory; +import org.dspace.storage.bitstore.service.BitstreamStorageService; +import org.dspace.util.FunctionalUtils; import org.springframework.beans.factory.annotation.Autowired; /** @@ -42,9 +64,14 @@ * NB: you must have obtained an account with Amazon to use this store * * @author Richard Rodgers, Peter Dietz + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * */ -public class S3BitStoreService implements BitStoreService { +public class S3BitStoreService extends BaseBitStoreService { + protected static final String DEFAULT_BUCKET_PREFIX = "dspace-asset-"; + // Prefix indicating a registered bitstream + protected final String REGISTERED_FLAG = "-R"; /** * log4j log */ @@ -53,11 +80,27 @@ public class S3BitStoreService implements BitStoreService { /** * 
Checksum algorithm */ - private static final String CSA = "MD5"; + static final String CSA = "MD5"; + + // These settings control the way an identifier is hashed into + // directory and file names + // + // With digitsPerLevel 2 and directoryLevels 3, an identifier + // like 12345678901234567890 turns into the relative name + // /12/34/56/12345678901234567890. + // + // You should not change these settings if you have data in the + // asset store, as the BitstreamStorageManager will be unable + // to find your existing data. + protected static final int digitsPerLevel = 2; + protected static final int directoryLevels = 3; + + private boolean enabled = false; private String awsAccessKey; private String awsSecretKey; private String awsRegionName; + private boolean useRelativePath; /** * container for all the assets @@ -74,9 +117,46 @@ public class S3BitStoreService implements BitStoreService { */ private AmazonS3 s3Service = null; + /** + * S3 transfer manager + * this is reused between put calls to use less resources for multiple uploads + */ + private TransferManager tm = null; + private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - public S3BitStoreService() { + + /** + * Utility method for generate AmazonS3 builder + * + * @param regions wanted regions in client + * @param awsCredentials credentials of the client + * @return builder with the specified parameters + */ + protected static Supplier amazonClientBuilderBy( + @NotNull Regions regions, + @NotNull AWSCredentials awsCredentials + ) { + return () -> AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)) + .withRegion(regions) + .build(); + } + + public S3BitStoreService() {} + + /** + * This constructor is used for test purpose. 
+ * + * @param s3Service AmazonS3 service + */ + protected S3BitStoreService(AmazonS3 s3Service) { + this.s3Service = s3Service; + } + + @Override + public boolean isEnabled() { + return this.enabled; } /** @@ -88,47 +168,70 @@ public S3BitStoreService() { */ @Override public void init() throws IOException { - if (StringUtils.isBlank(getAwsAccessKey()) || StringUtils.isBlank(getAwsSecretKey())) { - log.warn("Empty S3 access or secret"); - } - // init client - AWSCredentials awsCredentials = new BasicAWSCredentials(getAwsAccessKey(), getAwsSecretKey()); - s3Service = new AmazonS3Client(awsCredentials); - - // bucket name - if (StringUtils.isEmpty(bucketName)) { - // get hostname of DSpace UI to use to name bucket - String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); - bucketName = "dspace-asset-" + hostname; - log.warn("S3 BucketName is not configured, setting default: " + bucketName); + if (this.isInitialized()) { + return; } try { - if (!s3Service.doesBucketExist(bucketName)) { - s3Service.createBucket(bucketName); - log.info("Creating new S3 Bucket: " + bucketName); + if (StringUtils.isNotBlank(getAwsAccessKey()) && StringUtils.isNotBlank(getAwsSecretKey())) { + log.warn("Use local defined S3 credentials"); + // region + Regions regions = Regions.DEFAULT_REGION; + if (StringUtils.isNotBlank(awsRegionName)) { + try { + regions = Regions.fromName(awsRegionName); + } catch (IllegalArgumentException e) { + log.warn("Invalid aws_region: " + awsRegionName); + } + } + // init client + s3Service = FunctionalUtils.getDefaultOrBuild( + this.s3Service, + amazonClientBuilderBy( + regions, + new BasicAWSCredentials(getAwsAccessKey(), getAwsSecretKey()) + ) + ); + log.warn("S3 Region set to: " + regions.getName()); + } else { + log.info("Using a IAM role or aws environment credentials"); + s3Service = FunctionalUtils.getDefaultOrBuild( + this.s3Service, + AmazonS3ClientBuilder::defaultClient + ); + } + + // bucket name + if 
(StringUtils.isEmpty(bucketName)) { + // get hostname of DSpace UI to use to name bucket + String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); + bucketName = DEFAULT_BUCKET_PREFIX + hostname; + log.warn("S3 BucketName is not configured, setting default: " + bucketName); } - } catch (AmazonClientException e) { - log.error(e); - throw new IOException(e); - } - // region - if (StringUtils.isNotBlank(awsRegionName)) { try { - Regions regions = Regions.fromName(awsRegionName); - Region region = Region.getRegion(regions); - s3Service.setRegion(region); - log.info("S3 Region set to: " + region.getName()); - } catch (IllegalArgumentException e) { - log.warn("Invalid aws_region: " + awsRegionName); + if (!s3Service.doesBucketExistV2(bucketName)) { + s3Service.createBucket(bucketName); + log.info("Creating new S3 Bucket: " + bucketName); + } + } catch (AmazonClientException e) { + throw new IOException(e); } + this.initialized = true; + log.info("AWS S3 Assetstore ready to go! bucket:" + bucketName); + } catch (Exception e) { + this.initialized = false; + log.error("Can't initialize this store!", e); } log.info("AWS S3 Assetstore ready to go! bucket:" + bucketName); - } + tm = FunctionalUtils.getDefaultOrBuild(tm, () -> TransferManagerBuilder.standard() + .withAlwaysCalculateMultipartMd5(true) + .withS3Client(s3Service) + .build()); + } /** * Return an identifier unique to this asset store instance @@ -151,10 +254,21 @@ public String generateId() { @Override public InputStream get(Bitstream bitstream) throws IOException { String key = getFullKey(bitstream.getInternalId()); + // Strip -R from bitstream key if it's registered + if (isRegisteredBitstream(key)) { + key = key.substring(REGISTERED_FLAG.length()); + } try { - S3Object object = s3Service.getObject(new GetObjectRequest(bucketName, key)); - return (object != null) ? 
object.getObjectContent() : null; - } catch (AmazonClientException e) { + File tempFile = File.createTempFile("s3-disk-copy-" + UUID.randomUUID(), "temp"); + tempFile.deleteOnExit(); + + GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, key); + + Download download = tm.download(getObjectRequest, tempFile); + download.waitForCompletion(); + + return new DeleteOnCloseFileInputStream(tempFile); + } catch (AmazonClientException | InterruptedException e) { log.error("get(" + key + ")", e); throw new IOException(e); } @@ -176,25 +290,33 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { String key = getFullKey(bitstream.getInternalId()); //Copy istream to temp file, and send the file, with some metadata File scratchFile = File.createTempFile(bitstream.getInternalId(), "s3bs"); - try { - FileUtils.copyInputStreamToFile(in, scratchFile); - long contentLength = scratchFile.length(); - - PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, scratchFile); - PutObjectResult putObjectResult = s3Service.putObject(putObjectRequest); - - bitstream.setSizeBytes(contentLength); - bitstream.setChecksum(putObjectResult.getETag()); + try ( + FileOutputStream fos = new FileOutputStream(scratchFile); + // Read through a digest input stream that will work out the MD5 + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)); + ) { + Utils.bufferedCopy(dis, fos); + in.close(); + + Upload upload = tm.upload(bucketName, key, scratchFile); + + upload.waitForUploadResult(); + + bitstream.setSizeBytes(scratchFile.length()); + // we cannot use the S3 ETAG here as it could be not a MD5 in case of multipart upload (large files) or if + // the bucket is encrypted + bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); bitstream.setChecksumAlgorithm(CSA); - scratchFile.delete(); - - } catch (AmazonClientException | IOException e) { + } catch (AmazonClientException | IOException | 
InterruptedException e) { log.error("put(" + bitstream.getInternalId() + ", is)", e); throw new IOException(e); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); } finally { - if (scratchFile.exists()) { - scratchFile.delete(); + if (!scratchFile.delete()) { + scratchFile.deleteOnExit(); } } } @@ -206,40 +328,56 @@ public void put(Bitstream bitstream, InputStream in) throws IOException { * (Does not use getContentMD5, as that is 128-bit MD5 digest calculated on caller's side) * * @param bitstream The asset to describe - * @param attrs A Map whose keys consist of desired metadata fields + * @param attrs A List of desired metadata fields * @return attrs * A Map with key/value pairs of desired metadata * If file not found, then return null * @throws java.io.IOException If a problem occurs while obtaining metadata */ @Override - public Map about(Bitstream bitstream, Map attrs) throws IOException { + public Map about(Bitstream bitstream, List attrs) throws IOException { + String key = getFullKey(bitstream.getInternalId()); + // If this is a registered bitstream, strip the -R prefix before retrieving + if (isRegisteredBitstream(key)) { + key = key.substring(REGISTERED_FLAG.length()); + } + + Map metadata = new HashMap<>(); + try { - ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); + ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key); if (objectMetadata != null) { - if (attrs.containsKey("size_bytes")) { - attrs.put("size_bytes", objectMetadata.getContentLength()); - } - if (attrs.containsKey("checksum")) { - attrs.put("checksum", objectMetadata.getETag()); - attrs.put("checksum_algorithm", CSA); - } - if (attrs.containsKey("modified")) { - attrs.put("modified", String.valueOf(objectMetadata.getLastModified().getTime())); + putValueIfExistsKey(attrs, metadata, "size_bytes", objectMetadata.getContentLength()); + putValueIfExistsKey(attrs, metadata, 
"modified", valueOf(objectMetadata.getLastModified().getTime())); + } + + putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA); + + if (attrs.contains("checksum")) { + try (InputStream in = get(bitstream); + DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA)) + ) { + Utils.copy(dis, NullOutputStream.NULL_OUTPUT_STREAM); + byte[] md5Digest = dis.getMessageDigest().digest(); + metadata.put("checksum", Utils.toHex(md5Digest)); + } catch (NoSuchAlgorithmException nsae) { + // Should never happen + log.warn("Caught NoSuchAlgorithmException", nsae); } - return attrs; } + + return metadata; } catch (AmazonS3Exception e) { if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) { - return null; + return metadata; } } catch (AmazonClientException e) { log.error("about(" + key + ", attrs)", e); throw new IOException(e); } - return null; + return metadata; } /** @@ -266,11 +404,53 @@ public void remove(Bitstream bitstream) throws IOException { * @return full key prefixed with a subfolder, if applicable */ public String getFullKey(String id) { + StringBuilder bufFilename = new StringBuilder(); if (StringUtils.isNotEmpty(subfolder)) { - return subfolder + "/" + id; + bufFilename.append(subfolder); + appendSeparator(bufFilename); + } + + if (this.useRelativePath) { + bufFilename.append(getRelativePath(id)); + } else { + bufFilename.append(id); + } + + if (log.isDebugEnabled()) { + log.debug("S3 filepath for " + id + " is " + + bufFilename.toString()); + } + + return bufFilename.toString(); + } + + /** + * there are 2 cases: + * - conventional bitstream, conventional storage + * - registered bitstream, conventional storage + * conventional bitstream: dspace ingested, dspace random name/path + * registered bitstream: registered to dspace, any name/path + * + * @param sInternalId + * @return Computed Relative path + */ + public String getRelativePath(String sInternalId) { + BitstreamStorageService bitstreamStorageService = 
StorageServiceFactory.getInstance() + .getBitstreamStorageService(); + + String sIntermediatePath = StringUtils.EMPTY; + if (bitstreamStorageService.isRegisteredBitstream(sInternalId)) { + sInternalId = sInternalId.substring(REGISTERED_FLAG.length()); } else { - return id; + sInternalId = sanitizeIdentifier(sInternalId); + sIntermediatePath = getIntermediatePath(sInternalId); } + + return sIntermediatePath + sInternalId; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; } public String getAwsAccessKey() { @@ -316,6 +496,14 @@ public void setSubfolder(String subfolder) { this.subfolder = subfolder; } + public boolean isUseRelativePath() { + return useRelativePath; + } + + public void setUseRelativePath(boolean useRelativePath) { + this.useRelativePath = useRelativePath; + } + /** * Contains a command-line testing tool. Expects arguments: * -a accessKey -s secretKey -f assetFileName @@ -324,32 +512,43 @@ public void setSubfolder(String subfolder) { * @throws Exception generic exception */ public static void main(String[] args) throws Exception { - //TODO use proper CLI, or refactor to be a unit test. Can't mock this without keys though. + //TODO Perhaps refactor to be a unit test. Can't mock this without keys though. 
// parse command line - String assetFile = null; - String accessKey = null; - String secretKey = null; - - for (int i = 0; i < args.length; i += 2) { - if (args[i].startsWith("-a")) { - accessKey = args[i + 1]; - } else if (args[i].startsWith("-s")) { - secretKey = args[i + 1]; - } else if (args[i].startsWith("-f")) { - assetFile = args[i + 1]; - } - } + Options options = new Options(); + Option option; + + option = Option.builder("a").desc("access key").hasArg().required().build(); + options.addOption(option); + + option = Option.builder("s").desc("secret key").hasArg().required().build(); + options.addOption(option); - if (accessKey == null || secretKey == null || assetFile == null) { - System.out.println("Missing arguments - exiting"); + option = Option.builder("f").desc("asset file name").hasArg().required().build(); + options.addOption(option); + + DefaultParser parser = new DefaultParser(); + + CommandLine command; + try { + command = parser.parse(options, args); + } catch (ParseException e) { + System.err.println(e.getMessage()); + new HelpFormatter().printHelp( + S3BitStoreService.class.getSimpleName() + "options", options); return; } + + String accessKey = command.getOptionValue("a"); + String secretKey = command.getOptionValue("s"); + S3BitStoreService store = new S3BitStoreService(); AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey); - store.s3Service = new AmazonS3Client(awsCredentials); + store.s3Service = AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(awsCredentials)) + .build(); //Todo configurable region Region usEast1 = Region.getRegion(Regions.US_EAST_1); @@ -358,9 +557,9 @@ public static void main(String[] args) throws Exception { // get hostname of DSpace UI to use to name bucket String hostname = Utils.getHostName(configurationService.getProperty("dspace.ui.url")); //Bucketname should be lowercase - store.bucketName = "dspace-asset-" + hostname + ".s3test"; + store.bucketName = 
DEFAULT_BUCKET_PREFIX + hostname + ".s3test"; store.s3Service.createBucket(store.bucketName); -/* Broken in DSpace 6 TODO Refactor + /* Broken in DSpace 6 TODO Refactor // time everything, todo, swtich to caliper long start = System.currentTimeMillis(); // Case 1: store a file @@ -413,4 +612,14 @@ public static void main(String[] args) throws Exception { store.get(id); */ } + + /** + * Is this a registered bitstream? (not stored via this service originally) + * @param internalId + * @return + */ + public boolean isRegisteredBitstream(String internalId) { + return internalId.startsWith(REGISTERED_FLAG); + } + } diff --git a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java index 209ef5d16be6..7f5ed8f9129f 100644 --- a/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java +++ b/dspace-api/src/main/java/org/dspace/storage/bitstore/service/BitstreamStorageService.java @@ -102,7 +102,7 @@ public interface BitstreamStorageService { public UUID register(Context context, Bitstream bitstream, int assetstore, String bitstreamPath) throws SQLException, IOException, AuthorizeException; - public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; + public Map computeChecksum(Context context, Bitstream bitstream) throws IOException; /** * Does the internal_id column in the bitstream row indicate the bitstream diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index 98777c654b61..0732eea2a0b9 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -12,6 +12,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import 
java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; @@ -25,6 +26,7 @@ import javax.sql.DataSource; import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.core.Context; @@ -36,6 +38,7 @@ import org.flywaydb.core.Flyway; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.api.MigrationInfo; +import org.flywaydb.core.api.MigrationVersion; import org.flywaydb.core.api.callback.Callback; import org.flywaydb.core.api.configuration.FluentConfiguration; import org.flywaydb.core.internal.info.MigrationInfoDumper; @@ -72,7 +75,6 @@ public class DatabaseUtils { // Types of databases supported by DSpace. See getDbType() public static final String DBMS_POSTGRES = "postgres"; - public static final String DBMS_ORACLE = "oracle"; public static final String DBMS_H2 = "h2"; // Name of the table that Flyway uses for its migration history @@ -92,7 +94,7 @@ public static void main(String[] argv) { // Usage checks if (argv.length < 1) { System.out.println("\nDatabase action argument is missing."); - System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'validate', " + + System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'skip', 'validate', " + "'update-sequences' or 'clean'"); System.out.println("\nOr, type 'database help' for more information.\n"); System.exit(1); @@ -110,288 +112,349 @@ public static void main(String[] argv) { // *before* any other Flyway commands can be run. This is a safety check. 
FlywayUpgradeUtils.upgradeFlywayTable(flyway, dataSource.getConnection()); - // "test" = Test Database Connection - if (argv[0].equalsIgnoreCase("test")) { - // Try to connect to the database - System.out.println("\nAttempting to connect to database"); - try (Connection connection = dataSource.getConnection()) { - System.out.println("Connected successfully!"); + // Determine action param passed to "./dspace database" + switch (argv[0].toLowerCase(Locale.ENGLISH)) { + // "test" = Test Database Connection + case "test": + // Try to connect to the database + System.out.println("\nAttempting to connect to database"); + try (Connection connection = dataSource.getConnection()) { + System.out.println("Connected successfully!"); - // Print basic database connection information - printDBInfo(connection); + // Print basic database connection information + printDBInfo(connection); - // Print any database warnings/errors found (if any) - boolean issueFound = printDBIssues(connection); + // Print any database warnings/errors found (if any) + boolean issueFound = printDBIssues(connection); - // If issues found, exit with an error status (even if connection succeeded). - if (issueFound) { + // If issues found, exit with an error status (even if connection succeeded). 
+ if (issueFound) { + System.exit(1); + } else { + System.exit(0); + } + } catch (SQLException sqle) { + System.err.println("\nError running 'test': "); + System.err.println(" - " + sqle); + System.err.println("\nPlease see the DSpace documentation for assistance.\n"); + sqle.printStackTrace(System.err); System.exit(1); - } else { - System.exit(0); } - } catch (SQLException sqle) { - System.err.println("\nError running 'test': "); - System.err.println(" - " + sqle); - System.err.println("\nPlease see the DSpace documentation for assistance.\n"); - sqle.printStackTrace(); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("info") || argv[0].equalsIgnoreCase("status")) { - try (Connection connection = dataSource.getConnection()) { - // Print basic Database info - printDBInfo(connection); - - // Get info table from Flyway - System.out.println("\n" + MigrationInfoDumper.dumpToAsciiTable(flyway.info().all())); - - // If Flyway is NOT yet initialized, also print the determined version information - // NOTE: search is case sensitive, as flyway table name is ALWAYS lowercase, - // See: http://flywaydb.org/documentation/faq.html#case-sensitive - if (!tableExists(connection, flyway.getConfiguration().getTable(), true)) { - System.out - .println("\nNOTE: This database is NOT yet initialized for auto-migrations (via Flyway)."); - // Determine which version of DSpace this looks like - String dbVersion = determineDBVersion(connection); - if (dbVersion != null) { + break; + // "info" and "status" are identical and provide database info + case "info": + case "status": + try (Connection connection = dataSource.getConnection()) { + // Print basic Database info + printDBInfo(connection); + + // Get info table from Flyway + System.out.println("\n" + MigrationInfoDumper.dumpToAsciiTable(flyway.info().all())); + + // If Flyway is NOT yet initialized, also print the determined version information + // NOTE: search is case sensitive, as flyway table name is ALWAYS lowercase, + // 
See: http://flywaydb.org/documentation/faq.html#case-sensitive + if (!tableExists(connection, flyway.getConfiguration().getTable(), true)) { System.out - .println("\nYour database looks to be compatible with DSpace version " + dbVersion); - System.out.println( - "All upgrades *after* version " + dbVersion + " will be run during the next migration" + - "."); - System.out.println("\nIf you'd like to upgrade now, simply run 'dspace database migrate'."); + .println("\nNOTE: This database is NOT yet initialized for auto-migrations " + + "(via Flyway)."); + // Determine which version of DSpace this looks like + String dbVersion = determineDBVersion(connection); + if (dbVersion != null) { + System.out + .println("\nYour database looks to be compatible with DSpace version " + dbVersion); + System.out.println( + "All upgrades *after* version " + dbVersion + " will be run during the next " + + "migration."); + System.out.println("\nIf you'd like to upgrade now, simply run 'dspace database " + + "migrate'."); + } } - } - // Print any database warnings/errors found (if any) - boolean issueFound = printDBIssues(connection); + // Print any database warnings/errors found (if any) + boolean issueFound = printDBIssues(connection); - // If issues found, exit with an error status - if (issueFound) { + // If issues found, exit with an error status + if (issueFound) { + System.exit(1); + } else { + System.exit(0); + } + } catch (SQLException e) { + System.err.println("Info exception:"); + e.printStackTrace(System.err); System.exit(1); - } else { - System.exit(0); } - } catch (SQLException e) { - System.err.println("Info exception:"); - e.printStackTrace(); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("migrate")) { - try (Connection connection = dataSource.getConnection()) { - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - - // "migrate" allows for an OPTIONAL second argument: - // - "ignored" = Also run any previously "ignored" migrations 
during the migration - // - [version] = ONLY run migrations up to a specific DSpace version (ONLY FOR TESTING) - if (argv.length == 2) { - if (argv[1].equalsIgnoreCase("ignored")) { - System.out.println( - "Migrating database to latest version AND running previously \"Ignored\" " + - "migrations... (Check logs for details)"); - // Update the database to latest version, but set "outOfOrder=true" - // This will ensure any old migrations in the "ignored" state are now run - updateDatabase(dataSource, connection, null, true); + break; + // "migrate" = Run all pending database migrations + case "migrate": + try (Connection connection = dataSource.getConnection()) { + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + + // "migrate" allows for an OPTIONAL second argument (only one may be specified): + // - "ignored" = Also run any previously "ignored" migrations during the migration + // - "force" = Even if no pending migrations exist, still run migrate to trigger callbacks. + // - [version] = ONLY run migrations up to a specific DSpace version (ONLY FOR TESTING) + if (argv.length == 2) { + if (argv[1].equalsIgnoreCase("ignored")) { + System.out.println( + "Migrating database to latest version AND running previously \"Ignored\" " + + "migrations... (Check logs for details)"); + // Update the database to latest version, but set "outOfOrder=true" + // This will ensure any old migrations in the "ignored" state are now run + updateDatabase(dataSource, connection, null, true); + } else if (argv[1].equalsIgnoreCase("force")) { + updateDatabase(dataSource, connection, null, false, true); + } else { + // Otherwise, we assume "argv[1]" is a valid migration version number + // This is only for testing! Never specify for Production! 
+ String migrationVersion = argv[1]; + BufferedReader input = new BufferedReader( + new InputStreamReader(System.in, StandardCharsets.UTF_8)); + + System.out.println( + "You've specified to migrate your database ONLY to version " + migrationVersion + + " ..."); + System.out.println( + "\nWARNING: In this mode, we DISABLE all callbacks, which means that you will " + + "need to manually update registries and manually run a reindex. This is " + + "because you are attempting to use an OLD version (" + migrationVersion + ") " + + "Database with a newer DSpace API. NEVER do this in a PRODUCTION scenario. " + + "The resulting database is only useful for migration testing.\n"); + + System.out.print( + "Are you SURE you only want to migrate your database to version " + + migrationVersion + "? [y/n]: "); + String choiceString = input.readLine(); + input.close(); + + if (choiceString.equalsIgnoreCase("y")) { + System.out.println( + "Migrating database ONLY to version " + migrationVersion + " ... " + + "(Check logs for details)"); + // Update the database, to the version specified. + updateDatabase(dataSource, connection, migrationVersion, false); + } else { + System.out.println("No action performed."); + } + } } else { - // Otherwise, we assume "argv[1]" is a valid migration version number - // This is only for testing! Never specify for Production! + System.out.println("Migrating database to latest version... 
" + + "(Check dspace logs for details)"); + updateDatabase(dataSource, connection); + } + System.out.println("Done."); + System.exit(0); + } catch (SQLException e) { + System.err.println("Migration exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "repair" = Run Flyway repair script + case "repair": + try (Connection connection = dataSource.getConnection();) { + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + System.out.println( + "Attempting to repair any previously failed migrations (or mismatched checksums) via " + + "FlywayDB... (Check dspace logs for details)"); + flyway.repair(); + System.out.println("Done."); + System.exit(0); + } catch (SQLException | FlywayException e) { + System.err.println("Repair exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; + // "skip" = Skip a specific Flyway migration (by telling Flyway it succeeded) + case "skip": + try { + // "skip" requires a migration version to skip. Only that exact version will be skipped. + if (argv.length == 2) { String migrationVersion = argv[1]; - BufferedReader input = new BufferedReader(new InputStreamReader(System.in)); + BufferedReader input = new BufferedReader( + new InputStreamReader(System.in, StandardCharsets.UTF_8)); System.out.println( - "You've specified to migrate your database ONLY to version " + migrationVersion + " " + + "You've specified to SKIP the migration with version='" + migrationVersion + "' " + "..."); - System.out.println( - "\nWARNING: In this mode, we DISABLE all callbacks, which means that you will need " + - "to manually update registries and manually run a reindex. This is because you " + - "are attempting to use an OLD version (" + migrationVersion + ") Database with " + - "a newer DSpace API. NEVER do this in a PRODUCTION scenario. 
The resulting " + - "database is only useful for migration testing.\n"); - System.out.print( - "Are you SURE you only want to migrate your database to version " + migrationVersion - + "? [y/n]: "); + "\nWARNING: You should only skip migrations which are no longer required or have " + + "become obsolete. Skipping a REQUIRED migration may result in DSpace failing " + + "to startup or function properly. Are you sure you want to SKIP the " + + "migration with version '" + migrationVersion + "'? [y/n]: "); String choiceString = input.readLine(); input.close(); if (choiceString.equalsIgnoreCase("y")) { System.out.println( - "Migrating database ONLY to version " + migrationVersion + " ... (Check logs for " + - "details)"); - // Update the database, to the version specified. - updateDatabase(dataSource, connection, migrationVersion, false); - } else { - System.out.println("No action performed."); + "Attempting to skip migration with version " + migrationVersion + " " + + "... (Check logs for details)"); + skipMigration(dataSource, migrationVersion); } + } else { + System.out.println("The 'skip' command REQUIRES a version to be specified. " + + "Only that single migration will be skipped. For the list " + + "of migration versions use the 'info' command."); } - } else { - System.out.println("Migrating database to latest version... 
(Check dspace logs for details)"); - updateDatabase(dataSource, connection); + } catch (IOException e) { + System.err.println("Exception when attempting to skip migration:"); + e.printStackTrace(System.err); + System.exit(1); } - System.out.println("Done."); - System.exit(0); - } catch (SQLException e) { - System.err.println("Migration exception:"); - e.printStackTrace(); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("repair")) { - // "repair" = Run Flyway repair script - - try (Connection connection = dataSource.getConnection();) { - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - System.out.println( - "Attempting to repair any previously failed migrations (or mismatched checksums) via " + - "FlywayDB... (Check dspace logs for details)"); - flyway.repair(); - System.out.println("Done."); - System.exit(0); - } catch (SQLException | FlywayException e) { - System.err.println("Repair exception:"); - e.printStackTrace(); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("validate")) { + break; // "validate" = Run Flyway validation to check for database errors/issues - - try (Connection connection = dataSource.getConnection();) { - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - System.out - .println("Attempting to validate database status (and migration checksums) via FlywayDB..."); - flyway.validate(); - System.out.println("No errors thrown. Validation succeeded. 
(Check dspace logs for more details)"); - System.exit(0); - } catch (SQLException | FlywayException e) { - System.err.println("Validation exception:"); - e.printStackTrace(); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("clean")) { + case "validate": + try (Connection connection = dataSource.getConnection();) { + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + System.out + .println("Attempting to validate database status (and migration checksums) via " + + "FlywayDB..."); + flyway.validate(); + System.out.println("No errors thrown. Validation succeeded. (Check dspace logs for more " + + "details)"); + System.exit(0); + } catch (SQLException | FlywayException e) { + System.err.println("Validation exception:"); + e.printStackTrace(System.err); + System.exit(1); + } + break; // "clean" = Run Flyway clean script + case "clean": + // If clean is disabled, return immediately + if (flyway.getConfiguration().isCleanDisabled()) { + System.out.println( + "\nWARNING: 'clean' command is currently disabled, as it is dangerous to run in " + + "Production scenarios!"); + System.out.println( + "\nIn order to run a 'clean' you first must enable it in your DSpace config by " + + "specifying 'db.cleanDisabled=false'.\n"); + System.exit(1); + } - // If clean is disabled, return immediately - if (flyway.getConfiguration().isCleanDisabled()) { - System.out.println( - "\nWARNING: 'clean' command is currently disabled, as it is dangerous to run in Production " + - "scenarios!"); - System.out.println( - "\nIn order to run a 'clean' you first must enable it in your DSpace config by specifying 'db" + - ".cleanDisabled=false'.\n"); - System.exit(1); - } - - try (Connection connection = dataSource.getConnection()) { - String dbType = getDbType(connection); + try (Connection connection = dataSource.getConnection()) { + String dbType = getDbType(connection); - // Not all Postgres user accounts will be able to run a 'clean', - // as only 'superuser' 
accounts can remove the 'pgcrypto' extension. - if (dbType.equals(DBMS_POSTGRES)) { - // Check if database user has permissions suitable to run a clean - if (!PostgresUtils.checkCleanPermissions(connection)) { - String username = connection.getMetaData().getUserName(); - // Exit immediately, providing a descriptive error message - System.out.println( - "\nERROR: The database user '" + username + "' does not have sufficient privileges to" + - " run a 'database clean' (via Flyway)."); - System.out.println( - "\nIn order to run a 'clean', the database user MUST have 'superuser' privileges"); - System.out.println( - "OR the '" + PostgresUtils.PGCRYPTO + "' extension must be installed in a separate " + - "schema (see documentation)."); - System.out.println( - "\nOptionally, you could also manually remove the '" + PostgresUtils.PGCRYPTO + "' " + - "extension first (DROP EXTENSION " + PostgresUtils.PGCRYPTO + " CASCADE;), then " + - "rerun the 'clean'"); - System.exit(1); + // Not all Postgres user accounts will be able to run a 'clean', + // as only 'superuser' accounts can remove the 'pgcrypto' extension. 
+ if (dbType.equals(DBMS_POSTGRES)) { + // Check if database user has permissions suitable to run a clean + if (!PostgresUtils.checkCleanPermissions(connection)) { + String username = connection.getMetaData().getUserName(); + // Exit immediately, providing a descriptive error message + System.out.println( + "\nERROR: The database user '" + username + "' does not have sufficient " + + "privileges to run a 'database clean' (via Flyway)."); + System.out.println( + "\nIn order to run a 'clean', the database user MUST have 'superuser' privileges"); + System.out.println( + "OR the '" + PostgresUtils.PGCRYPTO + "' extension must be installed in a " + + "separate schema (see documentation)."); + System.out.println( + "\nOptionally, you could also manually remove the '" + PostgresUtils.PGCRYPTO + + "' extension first (DROP EXTENSION " + PostgresUtils.PGCRYPTO + + " CASCADE;), then rerun the 'clean'"); + System.exit(1); + } } - } - BufferedReader input = new BufferedReader(new InputStreamReader(System.in)); + BufferedReader input = new BufferedReader(new InputStreamReader(System.in, + StandardCharsets.UTF_8)); - System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); - System.out - .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); - System.out.println("There is NO turning back from this action. Backup your DB before continuing."); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("\nORACLE WARNING: your RECYCLEBIN will also be PURGED.\n"); - } else if (dbType.equals(DBMS_POSTGRES)) { - System.out.println( - "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped if it" + - " is in the same schema as the DSpace database.\n"); - } - System.out.print("Do you want to PERMANENTLY DELETE everything from your database? [y/n]: "); - String choiceString = input.readLine(); - input.close(); - - if (choiceString.equalsIgnoreCase("y")) { - System.out.println("Scrubbing database clean... 
(Check dspace logs for details)"); - cleanDatabase(flyway, dataSource); - System.out.println("Done."); - System.exit(0); - } else { - System.out.println("No action performed."); + System.out.println("\nDatabase URL: " + connection.getMetaData().getURL()); + System.out + .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); + System.out.println("There is NO turning back from this action. Backup your DB before " + + "continuing."); + if (dbType.equals(DBMS_POSTGRES)) { + System.out.println( + "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped " + + "if it is in the same schema as the DSpace database.\n"); + } + System.out.print("Do you want to PERMANENTLY DELETE everything from your database? [y/n]: "); + String choiceString = input.readLine(); + input.close(); + + if (choiceString.equalsIgnoreCase("y")) { + System.out.println("Scrubbing database clean... (Check dspace logs for details)"); + cleanDatabase(flyway, dataSource); + System.out.println("Done."); + System.exit(0); + } else { + System.out.println("No action performed."); + } + } catch (SQLException e) { + System.err.println("Clean exception:"); + e.printStackTrace(System.err); + System.exit(1); } - } catch (SQLException e) { - System.err.println("Clean exception:"); - e.printStackTrace(); - System.exit(1); - } - } else if (argv[0].equalsIgnoreCase("update-sequences")) { - try (Connection connection = dataSource.getConnection()) { - String dbType = getDbType(connection); - String sqlfile = "org/dspace/storage/rdbms/sqlmigration/" + dbType + - "/update-sequences.sql"; - InputStream sqlstream = DatabaseUtils.class.getClassLoader().getResourceAsStream(sqlfile); - if (sqlstream != null) { - String s = IOUtils.toString(sqlstream, "UTF-8"); - if (!s.isEmpty()) { - System.out.println("Running " + sqlfile); - connection.createStatement().execute(s); - System.out.println("update-sequences complete"); + break; + // "update-sequences" = Run DSpace's 
"update-sequences.sql" script + case "update-sequences": + try (Connection connection = dataSource.getConnection()) { + String dbType = getDbType(connection); + String sqlfile = "org/dspace/storage/rdbms/sqlmigration/" + dbType + + "/update-sequences.sql"; + InputStream sqlstream = DatabaseUtils.class.getClassLoader().getResourceAsStream(sqlfile); + if (sqlstream != null) { + String s = IOUtils.toString(sqlstream, StandardCharsets.UTF_8); + if (!s.isEmpty()) { + System.out.println("Running " + sqlfile); + connection.createStatement().execute(s); + System.out.println("update-sequences complete"); + } else { + System.err.println(sqlfile + " contains no SQL to execute"); + } } else { - System.err.println(sqlfile + " contains no SQL to execute"); + System.err.println(sqlfile + " not found"); } - } else { - System.err.println(sqlfile + " not found"); } - } - } else { - System.out.println("\nUsage: database [action]"); - System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', " + - "'update-sequences' or 'clean'"); - System.out.println( - " - test = Performs a test connection to database to " + - "validate connection settings"); - System.out.println( - " - info / status = Describe basic info/status about database, including validating the " + - "compatibility of this database"); - System.out.println( - " - migrate = Migrate the database to the latest version"); - System.out.println( - " - repair = Attempt to repair any previously failed database " + - "migrations or checksum mismatches (via Flyway repair)"); - System.out.println( - " - validate = Validate current database's migration status (via Flyway validate), " + - "validating all migration checksums."); - System.out.println( - " - update-sequences = Update database sequences after running AIP ingest."); - System.out.println( - " - clean = DESTROY all data and tables in database " + - "(WARNING there is no going back!). 
" + - "Requires 'db.cleanDisabled=false' setting in config."); - System.out.println(""); - System.exit(0); + break; + // default = show help information + default: + System.out.println("\nUsage: database [action]"); + System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'skip', " + + "'validate', 'update-sequences' or 'clean'"); + System.out.println( + " - test = Performs a test connection to database to " + + "validate connection settings"); + System.out.println( + " - info / status = Describe basic info/status about database, including validating the " + + "compatibility of this database"); + System.out.println( + " - migrate = Migrate the database to the latest version"); + System.out.println( + " - repair = Attempt to repair any previously failed database " + + "migrations or checksum mismatches (via Flyway repair)"); + System.out.println( + " - skip [version] = Skip a single, pending or ignored migration, " + + "ensuring it never runs."); + System.out.println( + " - validate = Validate current database's migration status (via Flyway validate), " + + "validating all migration checksums."); + System.out.println( + " - update-sequences = Update database sequences after running AIP ingest."); + System.out.println( + " - clean = DESTROY all data and tables in database " + + "(WARNING there is no going back!). " + + "Requires 'db.cleanDisabled=false' setting in config."); + System.out.println(""); + System.exit(0); + break; } } catch (Exception e) { System.err.println("Caught exception:"); - e.printStackTrace(); + e.printStackTrace(System.err); System.exit(1); } } /** * Print basic information about the current database to System.out. - * This is utilized by both the 'test' and 'info' commandline options. + * This is utilized by both the 'test' and 'info' command line options. 
* * @param connection current database connection * @throws SQLException if database error occurs @@ -401,6 +464,11 @@ private static void printDBInfo(Connection connection) throws SQLException { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); + if (!dbType.equals(DBMS_POSTGRES) && !dbType.equals(DBMS_H2)) { + System.err.println("===================================="); + System.err.println("ERROR: Database type " + dbType + " is UNSUPPORTED!"); + System.err.println("====================================="); + } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); System.out.println("Database Username: " + meta.getUserName()); @@ -654,6 +722,34 @@ protected static synchronized void updateDatabase(DataSource datasource, Connect protected static synchronized void updateDatabase(DataSource datasource, Connection connection, String targetVersion, boolean outOfOrder) throws SQLException { + updateDatabase(datasource, connection, targetVersion, outOfOrder, false); + } + + /** + * Ensures the current database is up-to-date with regards + * to the latest DSpace DB schema. If the scheme is not up-to-date, + * then any necessary database migrations are performed. + *

    + * FlywayDB (http://flywaydb.org/) is used to perform database migrations. + * If a Flyway DB migration fails it will be rolled back to the last + * successful migration, and any errors will be logged. + * + * @param datasource DataSource object (retrieved from DatabaseManager()) + * @param connection Database connection + * @param targetVersion If specified, only migrate the database to a particular *version* of DSpace. This is + * just useful for testing migrations, and should NOT be used in Production. + * If null, the database is migrated to the latest version. + * @param outOfOrder If true, Flyway will run any lower version migrations that were previously "ignored". + * If false, Flyway will only run new migrations with a higher version number. + * @param forceMigrate If true, always run a Flyway migration, even if no "Pending" migrations exist. This can be + * used to trigger Flyway Callbacks manually. + * If false, only run migration if pending migrations exist, otherwise do nothing. + * @throws SQLException if database error + * If database cannot be upgraded. + */ + protected static synchronized void updateDatabase(DataSource datasource, Connection connection, + String targetVersion, boolean outOfOrder, boolean forceMigrate) + throws SQLException { if (null == datasource) { throw new SQLException("The datasource is a null reference -- cannot continue."); } @@ -730,6 +826,10 @@ protected static synchronized void updateDatabase(DataSource datasource, // Flag that Discovery will need reindexing, since database was updated setReindexDiscovery(reindexAfterUpdate); + } else if (forceMigrate) { + log.info("DSpace database schema is up to date, but 'force' was specified. 
" + + "Running migrate command to trigger callbacks."); + flyway.migrate(); } else { log.info("DSpace database schema is up to date"); } @@ -739,6 +839,89 @@ protected static synchronized void updateDatabase(DataSource datasource, } } + /** + * Skips the given migration by marking it as "successful" in the Flyway table. This ensures + * the given migration will never be run again. + *

    + * WARNING: Skipping a required migration can result in unexpected errors. Make sure the migration is + * not required (or obsolete) before skipping it. + * @param dataSource current DataSource + * @param skipVersion version of migration to skip + * @throws SQLException if error occurs + */ + private static synchronized void skipMigration(DataSource dataSource, + String skipVersion) throws SQLException { + if (null == dataSource) { + throw new SQLException("The datasource is a null reference -- cannot continue."); + } + + try (Connection connection = dataSource.getConnection()) { + // Setup Flyway API against our database + FluentConfiguration flywayConfiguration = setupFlyway(dataSource); + + // In order to allow for skipping "Ignored" migrations, we MUST set "outOfOrder=true". + // (Otherwise Ignored migrations never appear in the pending list) + flywayConfiguration.outOfOrder(true); + + // Initialized Flyway object based on this configuration + Flyway flyway = flywayConfiguration.load(); + + // Find the migration we are skipping in the list of pending migrations + boolean foundMigration = false; + for (MigrationInfo migration : flyway.info().pending()) { + // If this migration matches our "skipVersion" + if (migration.getVersion().equals(MigrationVersion.fromVersion(skipVersion))) { + foundMigration = true; + System.out.println("Found migration matching version='" + skipVersion + "'. " + + "Changing state to 'Success' in order to skip it."); + + PreparedStatement statement = null; + try { + // Create SQL Insert which will log this migration as having already been run. 
+ String INSERT_SQL = "INSERT INTO " + FLYWAY_TABLE + " " + + "(" + + "installed_rank, version, description, type, script, " + + "checksum, installed_by, execution_time, success" + + ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"; + statement = connection.prepareStatement(INSERT_SQL); + // installed_rank + statement.setInt(1, getNextFlywayInstalledRank(flyway)); + // version + statement.setString(2, migration.getVersion().getVersion()); + // description + statement.setString(3, migration.getDescription()); + // type + statement.setString(4, migration.getType().toString()); + // script + statement.setString(5, migration.getScript()); + // checksum + statement.setInt(6, migration.getChecksum()); + // installed_by + statement.setString(7, getDBUserName(connection)); + // execution_time is set to zero as we didn't really execute it + statement.setInt(8, 0); + // success=true tells Flyway this migration no longer needs to be run. + statement.setBoolean(9, true); + + // Run the INSERT + statement.executeUpdate(); + } finally { + if (statement != null && !statement.isClosed()) { + statement.close(); + } + } + } + } + if (!foundMigration) { + System.err.println("Could not find migration to skip! " + + "No 'Pending' or 'Ignored' migrations match version='" + skipVersion + "'"); + } + } catch (FlywayException fe) { + // If any FlywayException (Runtime) is thrown, change it to a SQLException + throw new SQLException("Flyway error occurred", fe); + } + } + /** * Clean the existing database, permanently removing all data and tables *

    @@ -755,26 +938,6 @@ private static synchronized void cleanDatabase(Flyway flyway, DataSource dataSou // First, run Flyway's clean command on database. // For MOST database types, this takes care of everything flyway.clean(); - - try (Connection connection = dataSource.getConnection()) { - // Get info about which database type we are using - String dbType = getDbType(connection); - - // If this is Oracle, the only way to entirely clean the database - // is to also purge the "Recyclebin". See: - // http://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_9018.htm - if (dbType.equals(DBMS_ORACLE)) { - PreparedStatement statement = null; - try { - statement = connection.prepareStatement("PURGE RECYCLEBIN"); - statement.executeQuery(); - } finally { - if (statement != null && !statement.isClosed()) { - statement.close(); - } - } - } - } } catch (FlywayException fe) { // If any FlywayException (Runtime) is thrown, change it to a SQLException throw new SQLException("Flyway clean error occurred", fe); @@ -1023,11 +1186,6 @@ public static boolean sequenceExists(Connection connection, String sequenceName) // We need to filter by schema in PostgreSQL schemaFilter = true; break; - case DBMS_ORACLE: - // Oracle specific query for a sequence owned by our current DSpace user - // NOTE: No need to filter by schema for Oracle, as Schema = User - sequenceSQL = "SELECT COUNT(1) FROM user_sequences WHERE sequence_name=?"; - break; case DBMS_H2: // In H2, sequences are listed in the "information_schema.sequences" table // SEE: http://www.h2database.com/html/grammar.html#information_schema @@ -1131,11 +1289,6 @@ public static String getSchemaName(Connection connection) // For PostgreSQL, the default schema is named "public" // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html schema = "public"; - } else if (dbType.equals(DBMS_ORACLE)) { - // For Oracle, default schema is actually the user account - // See: http://stackoverflow.com/a/13341390 - DatabaseMetaData 
meta = connection.getMetaData(); - schema = meta.getUserName(); } else { // For H2 (in memory), there is no such thing as a schema schema = null; @@ -1145,6 +1298,34 @@ public static String getSchemaName(Connection connection) return schema; } + /** + * Get the Database User Name in use by this Connection. + * + * @param connection Current Database Connection + * @return User name as a string, or "null" if cannot be determined or unspecified + * @throws SQLException An exception that provides information on a database access error or other errors. + */ + public static String getDBUserName(Connection connection) + throws SQLException { + String username = null; + + // Try to get the schema from the DB connection itself. + // As long as the Database driver supports JDBC4.1, there should be a getSchema() method + // If this method is unimplemented or doesn't exist, it will throw an exception (likely an AbstractMethodError) + try { + username = connection.getMetaData().getUserName(); + } catch (Exception | AbstractMethodError e) { + // ignore + } + + // If we don't know our schema, let's try the schema in the DSpace configuration + if (StringUtils.isBlank(username)) { + username = canonicalize(connection, DSpaceServicesFactory.getInstance().getConfigurationService() + .getProperty("db.username")); + } + return username; + } + /** * Return the canonical name for a database identifier based on whether this * database defaults to storing identifiers in uppercase or lowercase. 
@@ -1284,13 +1465,14 @@ public void run() { Context context = null; try { context = new Context(); + context.setMode(Context.Mode.READ_ONLY); context.turnOffAuthorisationSystem(); log.info( "Post database migration, reindexing all content in Discovery search and browse engine"); // Reindex Discovery completely // Force clean all content - this.indexer.cleanIndex(true); + this.indexer.deleteIndex(); // Recreate the entire index (overwriting existing one) this.indexer.createIndex(context); // Rebuild spell checker (which is based on index) @@ -1333,8 +1515,6 @@ public static String getDbType(Connection connection) String dbms_lc = prodName.toLowerCase(Locale.ROOT); if (dbms_lc.contains("postgresql")) { return DBMS_POSTGRES; - } else if (dbms_lc.contains("oracle")) { - return DBMS_ORACLE; } else if (dbms_lc.contains("h2")) { // Used for unit testing only return DBMS_H2; @@ -1396,4 +1576,22 @@ public static Double getCurrentFlywayDSpaceState(Connection connection) throws S } return null; } + + /** + * Determine next valid "installed_rank" value from Flyway, based on the "installed_rank" of the + * last applied migration. + * @param flyway currently loaded Flyway + * @return next installed rank value + */ + private static int getNextFlywayInstalledRank(Flyway flyway) throws FlywayException { + // Load all applied migrations + MigrationInfo[] appliedMigrations = flyway.info().applied(); + // If no applied migrations, throw an error. + // This should never happen, but this would mean Flyway is not installed or initialized + if (ArrayUtils.isEmpty(appliedMigrations)) { + throw new FlywayException("Cannot determine next 'installed_rank' as no applied migrations exist"); + } + // Find the last migration in the list, and increment its "installed_rank" by one. 
+ return appliedMigrations[appliedMigrations.length - 1].getInstalledRank() + 1; + } } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java new file mode 100644 index 000000000000..e0e41516d01f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/EntityTypeServiceInitializer.java @@ -0,0 +1,78 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.rdbms; +import org.apache.logging.log4j.Logger; +import org.dspace.content.service.EntityTypeService; +import org.dspace.core.Context; +import org.flywaydb.core.api.callback.Callback; +import org.flywaydb.core.api.callback.Event; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Callback method to ensure that the default EntityTypes are created in the database + * AFTER the database migration completes. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class EntityTypeServiceInitializer implements Callback { + + private final Logger log = org.apache.logging.log4j.LogManager.getLogger(EntityTypeServiceInitializer.class); + + @Autowired(required = true) + private EntityTypeService entityTypeService; + + private void initEntityTypes() { + // After every migrate, ensure default EntityTypes are setup correctly. 
+ Context context = null; + try { + context = new Context(); + context.turnOffAuthorisationSystem(); + // While it's not really a formal "registry", we need to ensure the + // default, required EntityTypes exist in the DSpace database + entityTypeService.initDefaultEntityTypeNames(context); + context.restoreAuthSystemState(); + // Commit changes and close context + context.complete(); + } catch (Exception e) { + log.error("Error attempting to add/update default DSpace EntityTypes", e); + throw new RuntimeException(e); + } finally { + // Clean up our context, if it still exists & it was never completed + if (context != null && context.isValid()) { + context.abort(); + } + } + } + + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return EntityTypeServiceInitializer.class.getSimpleName(); + } + + @Override + public boolean supports(Event event, org.flywaydb.core.api.callback.Context context) { + // Must run AFTER all migrations complete, since it is dependent on Hibernate + return event.equals(Event.AFTER_MIGRATE); + } + + @Override + public boolean canHandleInTransaction(Event event, org.flywaydb.core.api.callback.Context context) { + return true; + } + + @Override + public void handle(Event event, org.flywaydb.core.api.callback.Context context) { + initEntityTypes(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java index 7338dd75bcb7..54498a1c644a 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/GroupServiceInitializer.java @@ -51,6 +51,16 @@ public void initGroups() { } + /** + * The callback name, Flyway will use 
this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return GroupServiceInitializer.class.getSimpleName(); + } + /** * Events supported by this callback. * @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java index 5798f4254cdc..5459cc3cc35e 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/PostgreSQLCryptoChecker.java @@ -97,6 +97,16 @@ public void removePgCrypto(Connection connection) { } } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return PostgreSQLCryptoChecker.class.getSimpleName(); + } + /** * Events supported by this callback. 
* @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java index ae8be0988a12..7debf3ba449b 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/RegistryUpdater.java @@ -12,6 +12,7 @@ import java.sql.SQLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPathExpressionException; import org.dspace.administer.MetadataImporter; import org.dspace.administer.RegistryImportException; @@ -89,7 +90,7 @@ private void updateRegistries() { } catch (IOException | SQLException | ParserConfigurationException | TransformerException | RegistryImportException | AuthorizeException | NonUniqueMetadataException - | SAXException e) { + | SAXException | XPathExpressionException e) { log.error("Error attempting to update Bitstream Format and/or Metadata Registries", e); throw new RuntimeException("Error attempting to update Bitstream Format and/or Metadata Registries", e); } finally { @@ -101,6 +102,16 @@ private void updateRegistries() { } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return RegistryUpdater.class.getSimpleName(); + } + /** * Events supported by this callback. 
* @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java index 26e76804e1e5..872a633146af 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/SiteServiceInitializer.java @@ -73,6 +73,16 @@ public void initializeSiteObject() { } + /** + * The callback name, Flyway will use this to sort the callbacks alphabetically before executing them + * @return The callback name + */ + @Override + public String getCallbackName() { + // Return class name only (not prepended by package) + return SiteServiceInitializer.class.getSimpleName(); + } + /** * Events supported by this callback. * @param event Flyway event diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/postgres/DSpacePostgreSQL82Dialect.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/postgres/DSpacePostgreSQL82Dialect.java deleted file mode 100644 index 2701c22fd208..000000000000 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/postgres/DSpacePostgreSQL82Dialect.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.storage.rdbms.hibernate.postgres; - -import java.sql.Types; - -import org.hibernate.dialect.PostgreSQL82Dialect; -import org.hibernate.service.ServiceRegistry; -import org.hibernate.type.PostgresUUIDType; -import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor; -import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; - -/** - * UUID's are not supported by default in hibernate due to differences in the database in order to fix this a custom - * sql dialect is needed. 
- * Source: https://forum.hibernate.org/viewtopic.php?f=1&t=1014157 - * - * @author kevinvandevelde at atmire.com - */ -public class DSpacePostgreSQL82Dialect extends PostgreSQL82Dialect { - @Override - public void contributeTypes(final org.hibernate.boot.model.TypeContributions typeContributions, - final ServiceRegistry serviceRegistry) { - super.contributeTypes(typeContributions, serviceRegistry); - typeContributions.contributeType(new InternalPostgresUUIDType()); - } - - @Override - protected void registerHibernateType(int code, String name) { - super.registerHibernateType(code, name); - } - - protected static class InternalPostgresUUIDType extends PostgresUUIDType { - - @Override - protected boolean registerUnderJavaType() { - return true; - } - } - - /** - * Override is needed to properly support the CLOB on metadatavalue in Postgres and Oracle. - * - * @param sqlCode {@linkplain java.sql.Types JDBC type-code} for the column mapped by this type. - * @return Descriptor for the SQL/JDBC side of a value mapping. 
- */ - @Override - public SqlTypeDescriptor getSqlTypeDescriptorOverride(int sqlCode) { - SqlTypeDescriptor descriptor; - switch (sqlCode) { - case Types.CLOB: { - descriptor = LongVarcharTypeDescriptor.INSTANCE; - break; - } - default: { - descriptor = super.getSqlTypeDescriptorOverride(sqlCode); - break; - } - } - return descriptor; - } -} diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 624d0cb55a5a..f0c4e4e17990 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -78,18 +78,12 @@ protected static Integer dropDBConstraint(Connection connection, String tableNam constraintName += "_" + StringUtils.lowerCase(constraintSuffix); cascade = true; break; - case "oracle": - // In Oracle, constraints are listed in the USER_CONS_COLUMNS table - constraintNameSQL = "SELECT CONSTRAINT_NAME " + - "FROM USER_CONS_COLUMNS " + - "WHERE TABLE_NAME = ? AND COLUMN_NAME = ?"; - cascade = true; - break; case "h2": - // In H2, constraints are listed in the "information_schema.constraints" table + // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + - "FROM information_schema.constraints " + - "WHERE table_name = ? AND column_list = ?"; + "FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE " + + "WHERE TABLE_NAME = ? 
AND COLUMN_NAME = ?"; + cascade = true; break; default: throw new SQLException("DBMS " + dbtype + " is unsupported in this migration."); @@ -159,9 +153,6 @@ protected static Integer dropDBTable(Connection connection, String tableName) case "postgresql": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; - case "oracle": - dropTableSQL = "DROP TABLE " + tableName + " CASCADE CONSTRAINTS"; - break; case "h2": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; @@ -207,9 +198,6 @@ protected static Integer dropDBSequence(Connection connection, String sequenceNa case "postgresql": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; - case "oracle": - dropSequenceSQL = "DROP SEQUENCE " + sequenceName; - break; case "h2": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; @@ -255,9 +243,6 @@ protected static Integer dropDBView(Connection connection, String viewName) case "postgresql": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; - case "oracle": - dropViewSQL = "DROP VIEW " + viewName + " CASCADE CONSTRAINTS"; - break; case "h2": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index 56c5b474d9fc..758e745ddc86 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -19,10 +19,9 @@ * of the "community" table. This is necessary for the upgrade from 1.3 to 1.4 *

    * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

    * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 6d82055e530e..37100a17f926 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -19,10 +19,9 @@ * from 1.5 to 1.6 *

    * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

    * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index ea72d99b6e29..8e2be91127c8 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -20,10 +20,9 @@ * this column must be renamed to "resource_id". *

    * This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

    * NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index b3306a9fc93c..0361e6805356 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -67,8 +67,6 @@ public void migrate(Context context) String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index 9aa0f4877c39..4c1cf3365395 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -46,8 +46,6 @@ public void migrate(Context context) throws Exception { String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/submit/consumer/SubmissionConfigConsumer.java 
b/dspace-api/src/main/java/org/dspace/submit/consumer/SubmissionConfigConsumer.java new file mode 100644 index 000000000000..a593fe8ae066 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/consumer/SubmissionConfigConsumer.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.consumer; + +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.discovery.IndexingService; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.event.Consumer; +import org.dspace.event.Event; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.factory.SubmissionServiceFactory; + +/** + * Consumer implementation to be used for Item Submission Configuration + * + * @author paulo.graca at fccn.pt + */ +public class SubmissionConfigConsumer implements Consumer { + /** + * log4j logger + */ + private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SubmissionConfigConsumer.class); + + IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), + IndexingService.class); + + @Override + public void initialize() throws Exception { + // No-op + } + + @Override + public void consume(Context ctx, Event event) throws Exception { + int st = event.getSubjectType(); + int et = event.getEventType(); + + + if ( st == Constants.COLLECTION ) { + switch (et) { + case Event.MODIFY_METADATA: + // Submission configuration it's based on solr + // for collection's entity type but, at this point + // that info isn't indexed yet, we need to force it + DSpaceObject subject = 
event.getSubject(ctx); + Collection collectionFromDSOSubject = (Collection) subject; + indexer.indexContent(ctx, new IndexableCollection (collectionFromDSOSubject), true, false, false); + indexer.commit(); + + log.debug("SubmissionConfigConsumer occured: " + event.toString()); + // reload submission configurations + SubmissionServiceFactory.getInstance().getSubmissionConfigService().reload(); + break; + + default: + log.debug("SubmissionConfigConsumer occured: " + event.toString()); + // reload submission configurations + SubmissionServiceFactory.getInstance().getSubmissionConfigService().reload(); + break; + } + } + } + + @Override + public void end(Context ctx) throws Exception { + // No-op + } + + @Override + public void finish(Context ctx) throws Exception { + // No-op + } + +} diff --git a/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactory.java b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactory.java new file mode 100644 index 000000000000..6020f13b46cc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactory.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.factory; + +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.service.SubmissionConfigService; + +/** + * Abstract factory to get services for submission, use SubmissionServiceFactory.getInstance() to retrieve an + * implementation + * + * @author paulo.graca at fccn.pt + */ +public abstract class SubmissionServiceFactory { + + public abstract SubmissionConfigService getSubmissionConfigService() throws SubmissionConfigReaderException; + + public static SubmissionServiceFactory getInstance() { + return 
DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("submissionServiceFactory", SubmissionServiceFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactoryImpl.java new file mode 100644 index 000000000000..19f050859769 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/factory/SubmissionServiceFactoryImpl.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.factory; + +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.submit.service.SubmissionConfigService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for submission, use SubmissionServiceFactory.getInstance() to + * retrieve an implementation + * + * @author paulo.graca at fccn.pt + */ +public class SubmissionServiceFactoryImpl extends SubmissionServiceFactory { + @Autowired(required = true) + private SubmissionConfigService submissionConfigService; + + @Override + public SubmissionConfigService getSubmissionConfigService() throws SubmissionConfigReaderException { + return submissionConfigService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java index 362f2720bb73..db1fdcdd1924 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigration.java @@ -64,12 +64,6 @@ public class SubmissionFormsMigration extends DSpaceRunnable"; private List tempFiles = new ArrayList<>(); - /** - * 
We need to force this, because some dependency elsewhere interferes. - */ - private static final String TRANSFORMER_FACTORY_CLASS - = "org.apache.xalan.processor.TransformerFactoryImpl"; - @Override public void internalRun() throws TransformerException { if (help) { @@ -101,8 +95,7 @@ private void transform(String sourceFilePath, String xsltFilePath, String output Result result = new StreamResult(new File(outputPath)); // Create an instance of TransformerFactory - TransformerFactory transformerFactory = TransformerFactory.newInstance( - TRANSFORMER_FACTORY_CLASS, null); + TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer trans; try { diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java index ca0c93fbe7de..894d3491a181 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.submit.migration; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script @@ -23,9 +18,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,26 +30,14 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context 
context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { Options options = new Options(); options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); - options.getOption("f").setType(String.class); options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); - options.getOption("s").setType(String.class); options.addOption("h", "help", false, "help"); - options.getOption("h").setType(boolean.class); super.options = options; } diff --git a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java index af3574da699e..6d9f3198fe26 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationScriptConfiguration.java @@ -7,7 +7,12 @@ */ package org.dspace.submit.migration; +import java.util.List; + +import org.apache.commons.cli.Options; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; +import org.dspace.scripts.configuration.ScriptConfiguration; /** * Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be use in rest/scripts.xml configuration so @@ -15,10 +20,37 @@ * * @author Maria Verdonck (Atmire) on 05/01/2021 */ -public class SubmissionFormsMigrationScriptConfiguration extends SubmissionFormsMigrationCliScriptConfiguration { +public class SubmissionFormsMigrationScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + 
@Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionConfiguration.java new file mode 100644 index 000000000000..3348b1bc7f69 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionConfiguration.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.model; +import java.util.List; + +/** + * Simple bean used to configure the access conditions section + * in particular the set of available policies + * and the possibility to modify discoverability. 
+ * +* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) +*/ +public class AccessConditionConfiguration { + + private String name; + private Boolean canChangeDiscoverable; + private List options; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Boolean getCanChangeDiscoverable() { + return canChangeDiscoverable; + } + + public void setCanChangeDiscoverable(Boolean canChangeDiscoverable) { + this.canChangeDiscoverable = canChangeDiscoverable; + } + + public List getOptions() { + return options; + } + + public void setOptions(List options) { + this.options = options; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionConfigurationService.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionConfigurationService.java new file mode 100644 index 000000000000..cc3f9b1dee67 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionConfigurationService.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.model; + +import java.util.List; + +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Simple bean to manage different Access Condition configurations + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class AccessConditionConfigurationService { + + @Autowired + private List accessConditionConfigurations; + + public AccessConditionConfiguration getAccessConfigurationById(String name) { + return accessConditionConfigurations.stream().filter(x -> name.equals(x.getName())).findFirst().get(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java 
b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index b9e9f84438f7..e5cd86f50458 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -6,20 +6,24 @@ * http://www.dspace.org/license/ */ package org.dspace.submit.model; - import java.sql.SQLException; import java.text.ParseException; import java.util.Date; +import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.Bitstream; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; /** @@ -27,9 +31,8 @@ * set permission on a file. An option is defined by a name such as "open * access", "embargo", "restricted access", etc. and some optional attributes to * better clarify the constraints and input available to the user. For instance - * an embargo option could allow to set a start date not longer than 3 years, - * etc - * + * an embargo option could allow to set a start date not longer than 3 years. 
+ * * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ public class AccessConditionOption { @@ -40,9 +43,12 @@ public class AccessConditionOption { @Autowired GroupService groupService; - DateMathParser dateMathParser = new DateMathParser(); + @Autowired + private ResourcePolicyService resourcePolicyService; + + private static final Logger LOG = LogManager.getLogger(); - /** An unique name identifying the access contion option **/ + /** A unique name identifying the access condition option. **/ private String name; /** @@ -134,61 +140,107 @@ public void setEndDateLimit(String endDateLimit) { } /** - * Create a new resource policy for a bitstream + * Create a new resource policy for a DSpaceObject * @param context DSpace context - * @param b bitstream for which resource policy is created + * @param obj DSpaceObject for which resource policy is created * @param name name of the resource policy * @param description description of the resource policy * @param startDate start date of the resource policy. If {@link #getHasStartDate()} returns false, * startDate should be null. Otherwise startDate may not be null. * @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false, * endDate should be null. Otherwise endDate may not be null. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + * @throws ParseException passed through (indicates problem with a date). 
*/ - public void createResourcePolicy(Context context, Bitstream b, String name, String description, + public void createResourcePolicy(Context context, DSpaceObject obj, String name, String description, Date startDate, Date endDate) throws SQLException, AuthorizeException, ParseException { - if (getHasStartDate() && startDate == null) { + validateResourcePolicy(context, name, startDate, endDate); + Group group = groupService.findByName(context, getGroupName()); + authorizeService.createResourcePolicy(context, obj, group, null, Constants.READ, + ResourcePolicy.TYPE_CUSTOM, name, description, startDate, + endDate); + } + + /** + * Validate ResourcePolicy and after update it + * + * @param context DSpace context + * @param resourcePolicy ResourcePolicy to update + * @throws SQLException If database error + * @throws AuthorizeException If authorize error + * @throws ParseException If parser error + */ + public void updateResourcePolicy(Context context, ResourcePolicy resourcePolicy) + throws SQLException, AuthorizeException, ParseException { + validateResourcePolicy(context, resourcePolicy.getRpName(), + resourcePolicy.getStartDate(), resourcePolicy.getEndDate()); + resourcePolicyService.update(context, resourcePolicy); + } + + /** + * Validate the policy properties, throws exceptions if any is not valid. + * + * @param context DSpace context. + * @param name Name of the resource policy. + * @param startDate Start date of the resource policy. If + * {@link #getHasStartDate()} returns false, startDate + * should be null. Otherwise startDate may not be null. + * @param endDate End date of the resource policy. If + * {@link #getHasEndDate()} returns false, endDate should + * be null. Otherwise endDate may not be null. + * @throws IllegalStateException if a date is required and absent, + * a date is not required and present, or a date exceeds its + * configured maximum. + * @throws ParseException passed through. 
+ */ + public void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) + throws IllegalStateException, ParseException { + LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}", + name, startDate, endDate); + if (getHasStartDate() && Objects.isNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " requires a start date."); } - if (getHasEndDate() && endDate == null) { + if (getHasEndDate() && Objects.isNull(endDate)) { throw new IllegalStateException("The access condition " + getName() + " requires an end date."); } - if (!getHasStartDate() && startDate != null) { + if (!getHasStartDate() && Objects.nonNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " cannot contain a start date."); } - if (!getHasEndDate() && endDate != null) { + if (!getHasEndDate() && Objects.nonNull(endDate)) { throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date."); } + DateMathParser dateMathParser = new DateMathParser(); + Date latestStartDate = null; - if (getStartDateLimit() != null) { - latestStartDate = dateMathParser.parseMath(getStartDateLimit()); + if (Objects.nonNull(getStartDateLimit())) { + latestStartDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getStartDateLimit())); } Date latestEndDate = null; - if (getEndDateLimit() != null) { - latestEndDate = dateMathParser.parseMath(getEndDateLimit()); + if (Objects.nonNull(getEndDateLimit())) { + latestEndDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getEndDateLimit())); } + LOG.debug(" latestStartDate {}, latestEndDate {}", + latestStartDate, latestEndDate); // throw if startDate after latestStartDate - if (startDate != null && latestStartDate != null && startDate.after(latestStartDate)) { + if (Objects.nonNull(startDate) && Objects.nonNull(latestStartDate) && startDate.after(latestStartDate)) { throw new IllegalStateException(String.format( - "The 
start date of access condition %s should be earlier than %s from now.", - getName(), getStartDateLimit() + "The start date of access condition %s should be earlier than %s from now (%s).", + getName(), getStartDateLimit(), dateMathParser.getNow() )); } // throw if endDate after latestEndDate - if (endDate != null && latestEndDate != null && endDate.after(latestEndDate)) { + if (Objects.nonNull(endDate) && Objects.nonNull(latestEndDate) && endDate.after(latestEndDate)) { throw new IllegalStateException(String.format( - "The end date of access condition %s should be earlier than %s from now.", - getName(), getEndDateLimit() + "The end date of access condition %s should be earlier than %s from now (%s).", + getName(), getEndDateLimit(), dateMathParser.getNow() )); } - - Group group = groupService.findByName(context, getGroupName()); - authorizeService.createResourcePolicy(context, b, group, null, Constants.READ, - ResourcePolicy.TYPE_CUSTOM, name, description, startDate, - endDate); } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java index bc2f117b3c82..a6421b3f7adb 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/UploadConfiguration.java @@ -8,15 +8,17 @@ package org.dspace.submit.model; import java.util.List; +import javax.inject.Inject; import org.dspace.services.ConfigurationService; /** + * A collection of conditions to be met when uploading Bitstreams. 
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ public class UploadConfiguration { - private ConfigurationService configurationService; + private final ConfigurationService configurationService; private String metadataDefinition; private List options; @@ -24,22 +26,52 @@ public class UploadConfiguration { private Boolean required; private String name; + /** + * Construct a bitstream uploading configuration. + * @param configurationService DSpace configuration provided by the DI container. + */ + @Inject + public UploadConfiguration(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + + /** + * The list of access restriction types from which a submitter may choose. + * @return choices for restricting access to Bitstreams. + */ public List getOptions() { return options; } + /** + * Set the list of access restriction types from which to choose. + * Required. May be empty. + * @param options choices for restricting access to Bitstreams. + */ public void setOptions(List options) { this.options = options; } + /** + * Name of the submission form to which these conditions are attached. + * @return the form's name. + */ public String getMetadata() { return metadataDefinition; } + /** + * Name the submission form to which these conditions are attached. + * @param metadata the form's name. + */ public void setMetadata(String metadata) { this.metadataDefinition = metadata; } + /** + * Limit on the maximum size of an uploaded Bitstream. + * @return maximum upload size in bytes. + */ public Long getMaxSize() { if (maxSize == null) { maxSize = configurationService.getLongProperty("upload.max"); @@ -47,10 +79,18 @@ public Long getMaxSize() { return maxSize; } + /** + * Limit the maximum size of an uploaded Bitstream. + * @param maxSize maximum upload size in bytes. 
+ */ public void setMaxSize(Long maxSize) { this.maxSize = maxSize; } + /** + * Is at least one Bitstream required when submitting a new Item? + * @return true if a Bitstream is required. + */ public Boolean isRequired() { if (required == null) { //defaults to true @@ -60,25 +100,27 @@ public Boolean isRequired() { return required; } + /** + * Is at least one Bitstream required when submitting a new Item? + * @param required true if a Bitstream is required. + */ public void setRequired(Boolean required) { this.required = required; } - public ConfigurationService getConfigurationService() { - return configurationService; - } - - public void setConfigurationService(ConfigurationService configurationService) { - this.configurationService = configurationService; - } - + /** + * The unique name of this configuration. + * @return configuration's name. + */ public String getName() { return name; } + /** + * Give this configuration a unique name. Required. + * @param name configuration's name. + */ public void setName(String name) { this.name = name; } - - } diff --git a/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigService.java b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigService.java new file mode 100644 index 000000000000..c4b111a38f7e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigService.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.app.util.SubmissionConfig; +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.content.Collection; +import org.dspace.core.Context; + +/** + * Item Submission 
Configuration Service + * enables interaction with a submission config like + * getting a config by a collection name or handle + * as also retrieving submission configuration steps + * + * @author paulo.graca at fccn.pt + */ +public interface SubmissionConfigService { + + public void reload() throws SubmissionConfigReaderException; + + public String getDefaultSubmissionConfigName(); + + public List getAllSubmissionConfigs(Integer limit, Integer offset); + + public int countSubmissionConfigs(); + + public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle); + + public SubmissionConfig getSubmissionConfigByName(String submitName); + + public SubmissionStepConfig getStepConfig(String stepID) + throws SubmissionConfigReaderException; + + public List getCollectionsBySubmissionConfig(Context context, String submitName) + throws IllegalStateException, SQLException, SubmissionConfigReaderException; + +} diff --git a/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigServiceImpl.java b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigServiceImpl.java new file mode 100644 index 000000000000..a72bcc2c3bf9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/submit/service/SubmissionConfigServiceImpl.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.submit.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.app.util.SubmissionConfig; +import org.dspace.app.util.SubmissionConfigReader; +import org.dspace.app.util.SubmissionConfigReaderException; +import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.content.Collection; +import org.dspace.core.Context; +import org.springframework.beans.factory.InitializingBean; + +/** + * An implementation for Submission 
Config service + * + * @author paulo.graca at fccn.pt + */ +public class SubmissionConfigServiceImpl implements SubmissionConfigService, InitializingBean { + + protected SubmissionConfigReader submissionConfigReader; + + public SubmissionConfigServiceImpl () throws SubmissionConfigReaderException { + submissionConfigReader = new SubmissionConfigReader(); + } + + @Override + public void afterPropertiesSet() throws Exception { + submissionConfigReader.reload(); + } + + @Override + public void reload() throws SubmissionConfigReaderException { + submissionConfigReader.reload(); + } + + @Override + public String getDefaultSubmissionConfigName() { + return submissionConfigReader.getDefaultSubmissionConfigName(); + } + + @Override + public List getAllSubmissionConfigs(Integer limit, Integer offset) { + return submissionConfigReader.getAllSubmissionConfigs(limit, offset); + } + + @Override + public int countSubmissionConfigs() { + return submissionConfigReader.countSubmissionConfigs(); + } + + @Override + public SubmissionConfig getSubmissionConfigByCollection(String collectionHandle) { + return submissionConfigReader.getSubmissionConfigByCollection(collectionHandle); + } + + @Override + public SubmissionConfig getSubmissionConfigByName(String submitName) { + return submissionConfigReader.getSubmissionConfigByName(submitName); + } + + @Override + public SubmissionStepConfig getStepConfig(String stepID) throws SubmissionConfigReaderException { + return submissionConfigReader.getStepConfig(stepID); + } + + @Override + public List getCollectionsBySubmissionConfig(Context context, String submitName) + throws IllegalStateException, SQLException { + return submissionConfigReader.getCollectionsBySubmissionConfig(context, submitName); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java new file mode 100644 index 000000000000..c3035614343b --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/subscriptions/ContentGenerator.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.commons.lang.StringUtils.EMPTY; + +import java.io.ByteArrayOutputStream; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.content.crosswalk.StreamDisseminationCrosswalk; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.core.Email; +import org.dspace.core.I18nUtil; +import org.dspace.discovery.IndexableObject; +import org.dspace.eperson.EPerson; +import org.dspace.subscriptions.service.SubscriptionGenerator; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation class of SubscriptionGenerator + * which will handle the logic of sending the emails + * in case of 'content' subscriptionType + */ +@SuppressWarnings("rawtypes") +public class ContentGenerator implements SubscriptionGenerator { + + private final Logger log = LogManager.getLogger(ContentGenerator.class); + + @SuppressWarnings("unchecked") + private Map entityType2Disseminator = new HashMap(); + + @Autowired + private ItemService itemService; + + @Override + public void notifyForSubscriptions(Context context, EPerson ePerson, + List indexableComm, + List indexableColl) { + try { + if (Objects.nonNull(ePerson)) { + Locale supportedLocale = I18nUtil.getEPersonLocale(ePerson); + Email email = Email.getEmail(I18nUtil.getEmailFilename(supportedLocale, 
"subscriptions_content")); + email.addRecipient(ePerson.getEmail()); + + String bodyCommunities = generateBodyMail(context, indexableComm); + String bodyCollections = generateBodyMail(context, indexableColl); + if (bodyCommunities.equals(EMPTY) && bodyCollections.equals(EMPTY)) { + log.debug("subscription(s) of eperson {} do(es) not match any new items: nothing to send" + + " - exit silently", ePerson::getID); + return; + } + email.addArgument(bodyCommunities); + email.addArgument(bodyCollections); + email.send(); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + log.warn("Cannot email user eperson_id: {} eperson_email: {}", ePerson::getID, ePerson::getEmail); + } + } + + private String generateBodyMail(Context context, List indexableObjects) { + if (indexableObjects == null || indexableObjects.isEmpty()) { + return EMPTY; + } + try { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + out.write("\n".getBytes(UTF_8)); + for (IndexableObject indexableObject : indexableObjects) { + out.write("\n".getBytes(UTF_8)); + Item item = (Item) indexableObject.getIndexedObject(); + String entityType = itemService.getEntityTypeLabel(item); + Optional.ofNullable(entityType2Disseminator.get(entityType)) + .orElseGet(() -> entityType2Disseminator.get("Item")) + .disseminate(context, item, out); + } + return out.toString(); + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return EMPTY; + } + + public void setEntityType2Disseminator(Map entityType2Disseminator) { + this.entityType2Disseminator = entityType2Disseminator; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java new file mode 100644 index 000000000000..b429ecbd46e7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotification.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license 
and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +import java.sql.SQLException; +import java.util.Objects; +import java.util.UUID; + +import org.apache.commons.cli.ParseException; +import org.apache.commons.lang3.StringUtils; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.FrequencyType; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.utils.DSpace; + +/** + * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them + * + * @author alba aliu + */ +public class SubscriptionEmailNotification + extends DSpaceRunnable> { + + private Context context; + private SubscriptionEmailNotificationService subscriptionEmailNotificationService; + + @Override + @SuppressWarnings("unchecked") + public SubscriptionEmailNotificationConfiguration getScriptConfiguration() { + return new DSpace().getServiceManager().getServiceByName("subscription-send", + SubscriptionEmailNotificationConfiguration.class); + } + + @Override + public void setup() throws ParseException { + this.subscriptionEmailNotificationService = new DSpace().getServiceManager().getServiceByName( + SubscriptionEmailNotificationServiceImpl.class.getName(), SubscriptionEmailNotificationServiceImpl.class); + } + + @Override + public void internalRun() throws Exception { + assignCurrentUserInContext(); + assignSpecialGroupsInContext(); + String frequencyOption = commandLine.getOptionValue("f"); + if (StringUtils.isBlank(frequencyOption)) { + throw new IllegalArgumentException("Option --frequency (-f) must be set"); + } + + if (!FrequencyType.isSupportedFrequencyType(frequencyOption)) { + throw new IllegalArgumentException( + "Option f must be one of following values D(Day), W(Week) or M(Month)"); + } + 
subscriptionEmailNotificationService.perform(getContext(), handler, "content", frequencyOption); + } + + private void assignCurrentUserInContext() throws SQLException { + context = new Context(); + UUID uuid = getEpersonIdentifier(); + if (Objects.nonNull(uuid)) { + EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid); + context.setCurrentUser(ePerson); + } + } + + private void assignSpecialGroupsInContext() throws SQLException { + for (UUID uuid : handler.getSpecialGroups()) { + context.setSpecialGroup(uuid); + } + } + + public SubscriptionEmailNotificationService getSubscriptionEmailNotificationService() { + return subscriptionEmailNotificationService; + } + + public void setSubscriptionEmailNotificationService(SubscriptionEmailNotificationService notificationService) { + this.subscriptionEmailNotificationService = notificationService; + } + + public Context getContext() { + return context; + } + + public void setContext(Context context) { + this.context = context; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationCli.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationCli.java new file mode 100644 index 000000000000..338e7ff0e18b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationCli.java @@ -0,0 +1,15 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +/** + * Extension of {@link SubscriptionEmailNotification} for CLI. 
+ */ +public class SubscriptionEmailNotificationCli extends SubscriptionEmailNotification { + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationCliScriptConfiguration.java new file mode 100644 index 000000000000..f0eb2fd5c83e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationCliScriptConfiguration.java @@ -0,0 +1,16 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +/** + * Extension of {@link SubscriptionEmailNotificationCli} for CLI. + */ +public class SubscriptionEmailNotificationCliScriptConfiguration + extends SubscriptionEmailNotificationConfiguration { + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java new file mode 100644 index 000000000000..dd61fab9671c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.subscriptions; + +import java.util.Objects; + +import org.apache.commons.cli.Options; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.configuration.ScriptConfiguration; + +/** + * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them + */ +public class 
SubscriptionEmailNotificationConfiguration extends ScriptConfiguration { + + private Class dspaceRunnableClass; + + @Override + public Options getOptions() { + if (Objects.isNull(options)) { + Options options = new Options(); + options.addOption("f", "frequency", true, + "Subscription frequency. Valid values include: D (Day), W (Week) and M (Month)"); + options.getOption("f").setRequired(true); + super.options = options; + } + return options; + } + + @Override + public Class getDspaceRunnableClass() { + return dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java new file mode 100644 index 000000000000..95272235095a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationService.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +import java.util.Set; + +import org.dspace.core.Context; +import org.dspace.scripts.handler.DSpaceRunnableHandler; + +/** + * Service interface class for the subscription e-mail notification services + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface SubscriptionEmailNotificationService { + + /** + * Performs sending of e-mails to subscribers by frequency value and SubscriptionType + * + * @param context DSpace context object + * @param handler Applicable DSpaceRunnableHandler + * @param subscriptionType Currently supported only "content" + * @param frequency Valid values include: D (Day), W (Week) and M (Month) + */ + public void 
perform(Context context, DSpaceRunnableHandler handler, String subscriptionType, String frequency); + + /** + * returns a set of supported SubscriptionTypes + */ + public Set getSupportedSubscriptionTypes(); + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java new file mode 100644 index 000000000000..8fb01cd36e92 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationServiceImpl.java @@ -0,0 +1,172 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions; + +import static org.dspace.core.Constants.COLLECTION; +import static org.dspace.core.Constants.COMMUNITY; +import static org.dspace.core.Constants.READ; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.IndexableObject; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Subscription; +import org.dspace.eperson.service.SubscribeService; +import org.dspace.scripts.DSpaceRunnable; +import org.dspace.scripts.handler.DSpaceRunnableHandler; +import org.dspace.subscriptions.service.DSpaceObjectUpdates; +import org.dspace.subscriptions.service.SubscriptionGenerator; +import 
org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them + * + * @author alba aliu + */ +public class SubscriptionEmailNotificationServiceImpl implements SubscriptionEmailNotificationService { + + private static final Logger log = LogManager.getLogger(SubscriptionEmailNotificationServiceImpl.class); + + private Map contentUpdates = new HashMap<>(); + @SuppressWarnings("rawtypes") + private Map subscriptionType2generators = new HashMap<>(); + + @Autowired + private AuthorizeService authorizeService; + @Autowired + private SubscribeService subscribeService; + + @SuppressWarnings("rawtypes") + public SubscriptionEmailNotificationServiceImpl(Map contentUpdates, + Map subscriptionType2generators) { + this.contentUpdates = contentUpdates; + this.subscriptionType2generators = subscriptionType2generators; + } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + public void perform(Context context, DSpaceRunnableHandler handler, String subscriptionType, String frequency) { + List communityItems = new ArrayList<>(); + List collectionsItems = new ArrayList<>(); + try { + List subscriptions = + findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, frequency); + // Here is verified if SubscriptionType is "content" Or "statistics" as them are configured + if (subscriptionType2generators.keySet().contains(subscriptionType)) { + // the list of the person who has subscribed + int iterator = 0; + for (Subscription subscription : subscriptions) { + DSpaceObject dSpaceObject = subscription.getDSpaceObject(); + EPerson ePerson = subscription.getEPerson(); + + if (!authorizeService.authorizeActionBoolean(context, ePerson, dSpaceObject, READ, true)) { + iterator++; + continue; + } + + if (dSpaceObject.getType() == COMMUNITY) { + List indexableCommunityItems = contentUpdates + .get(Community.class.getSimpleName().toLowerCase()) + 
.findUpdates(context, dSpaceObject, frequency); + communityItems.addAll(getItems(context, ePerson, indexableCommunityItems)); + } else if (dSpaceObject.getType() == COLLECTION) { + List indexableCollectionItems = contentUpdates + .get(Collection.class.getSimpleName().toLowerCase()) + .findUpdates(context, dSpaceObject, frequency); + collectionsItems.addAll(getItems(context, ePerson, indexableCollectionItems)); + } else { + log.warn("found an invalid DSpace Object type ({}) among subscriptions to send", + dSpaceObject.getType()); + continue; + } + + if (iterator < subscriptions.size() - 1) { + // as the subscriptions are ordered by eperson id, so we send them by ePerson + if (ePerson.equals(subscriptions.get(iterator + 1).getEPerson())) { + iterator++; + continue; + } else { + subscriptionType2generators.get(subscriptionType) + .notifyForSubscriptions(context, ePerson, communityItems, collectionsItems); + communityItems.clear(); + collectionsItems.clear(); + } + } else { + //in the end of the iteration + subscriptionType2generators.get(subscriptionType) + .notifyForSubscriptions(context, ePerson, communityItems, collectionsItems); + } + iterator++; + } + } else { + throw new IllegalArgumentException("Currently this SubscriptionType:" + subscriptionType + + " is not supported!"); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + handler.handleException(e); + context.abort(); + } + } + + @SuppressWarnings("rawtypes") + private List getItems(Context context, EPerson ePerson, List indexableItems) + throws SQLException { + List items = new ArrayList(); + for (IndexableObject indexableitem : indexableItems) { + Item item = (Item) indexableitem.getIndexedObject(); + if (authorizeService.authorizeActionBoolean(context, ePerson, item, READ, true)) { + items.add(indexableitem); + } + } + return items; + } + + /** + * Return all Subscriptions by subscriptionType and frequency ordered by ePerson ID + * if there are none it returns an empty list + * + * @param 
context DSpace context + * @param subscriptionType Could be "content" or "statistics". NOTE: in DSpace we have only "content" + * @param frequency Could be "D" stand for Day, "W" stand for Week, and "M" stand for Month + * @return + */ + private List findAllSubscriptionsBySubscriptionTypeAndFrequency(Context context, + String subscriptionType, String frequency) { + try { + return subscribeService.findAllSubscriptionsBySubscriptionTypeAndFrequency(context, subscriptionType, + frequency) + .stream() + .sorted(Comparator.comparing(s -> s.getEPerson().getID())) + .collect(Collectors.toList()); + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + return new ArrayList(); + } + + @Override + public Set getSupportedSubscriptionTypes() { + return subscriptionType2generators.keySet(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/objectupdates/CollectionUpdates.java b/dspace-api/src/main/java/org/dspace/subscriptions/objectupdates/CollectionUpdates.java new file mode 100644 index 000000000000..12d056f36800 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/objectupdates/CollectionUpdates.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions.objectupdates; + +import java.util.List; + +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.FrequencyType; +import org.dspace.subscriptions.service.DSpaceObjectUpdates; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Class which will be used to find + * 
all collection objects updated related with subscribed DSO + * + * @author Alba Aliu + */ +public class CollectionUpdates implements DSpaceObjectUpdates { + + @Autowired + private SearchService searchService; + + @Override + @SuppressWarnings("rawtypes") + public List findUpdates(Context context, DSpaceObject dSpaceObject, String frequency) + throws SearchServiceException { + DiscoverQuery discoverQuery = new DiscoverQuery(); + getDefaultFilterQueries().stream().forEach(fq -> discoverQuery.addFilterQueries(fq)); + discoverQuery.addFilterQueries("location.coll:(" + dSpaceObject.getID() + ")"); + discoverQuery.addFilterQueries("lastModified:" + FrequencyType.findLastFrequency(frequency)); + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + return discoverResult.getIndexableObjects(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/objectupdates/CommunityUpdates.java b/dspace-api/src/main/java/org/dspace/subscriptions/objectupdates/CommunityUpdates.java new file mode 100644 index 000000000000..0ae80d287aad --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/objectupdates/CommunityUpdates.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions.objectupdates; + +import java.util.List; + +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.FrequencyType; +import org.dspace.subscriptions.service.DSpaceObjectUpdates; +import org.springframework.beans.factory.annotation.Autowired; 
+ +/** + * Class which will be used to find + * all community objects updated related with subscribed DSO + * + * @author Alba Aliu + */ +public class CommunityUpdates implements DSpaceObjectUpdates { + + @Autowired + private SearchService searchService; + + @Override + @SuppressWarnings("rawtypes") + public List findUpdates(Context context, DSpaceObject dSpaceObject, String frequency) + throws SearchServiceException { + DiscoverQuery discoverQuery = new DiscoverQuery(); + getDefaultFilterQueries().stream().forEach(fq -> discoverQuery.addFilterQueries(fq)); + discoverQuery.addFilterQueries("location.comm:(" + dSpaceObject.getID() + ")"); + discoverQuery.addFilterQueries("lastModified:" + FrequencyType.findLastFrequency(frequency)); + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + return discoverResult.getIndexableObjects(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/service/DSpaceObjectUpdates.java b/dspace-api/src/main/java/org/dspace/subscriptions/service/DSpaceObjectUpdates.java new file mode 100644 index 000000000000..ec09b2a45fa4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/service/DSpaceObjectUpdates.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions.service; + +import java.util.Arrays; +import java.util.List; + +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchServiceException; + +/** + * Interface class which will be used to find all objects updated related with subscribed DSO + * + * @author Alba Aliu + */ +public interface DSpaceObjectUpdates { + + /** + * Send an email to some 
addresses, concerning a Subscription, using a given dso. + * + * @param context current DSpace session. + */ + @SuppressWarnings("rawtypes") + public List findUpdates(Context context, DSpaceObject dSpaceObject, String frequency) + throws SearchServiceException; + + default List getDefaultFilterQueries() { + return Arrays.asList("search.resourcetype:" + Item.class.getSimpleName(), + "-discoverable:" + false, + "-withdrawn:" + true); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java b/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java new file mode 100644 index 000000000000..1790513b9b79 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/subscriptions/service/SubscriptionGenerator.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.subscriptions.service; + +import java.util.List; + +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; + +/** + * Interface Class which will be used to send email notifications to ePerson + * containing information for all list of objects. 
+ * + * @author Alba Aliu + */ +public interface SubscriptionGenerator { + + public void notifyForSubscriptions(Context c, EPerson ePerson, List comm, List coll); + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/supervision/SupervisionOrder.java b/dspace-api/src/main/java/org/dspace/supervision/SupervisionOrder.java new file mode 100644 index 000000000000..52d5dacb74bb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/SupervisionOrder.java @@ -0,0 +1,78 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.SequenceGenerator; +import javax.persistence.Table; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.ReloadableEntity; +import org.dspace.eperson.Group; +import org.dspace.supervision.service.SupervisionOrderService; + +/** + * Database entity representation of the supervision_orders table + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +@Entity +@Table(name = "supervision_orders") +public class SupervisionOrder implements ReloadableEntity { + + @Id + @Column(name = "id") + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "supervision_orders_seq") + @SequenceGenerator(name = "supervision_orders_seq", sequenceName = "supervision_orders_seq", allocationSize = 1) + private Integer id; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "item_id") + private Item item; + + @ManyToOne(fetch = FetchType.LAZY) 
+ @JoinColumn(name = "eperson_group_id") + private Group group; + + /** + * Protected constructor, create object using: + * {@link SupervisionOrderService#create(Context, Item, Group)} + */ + protected SupervisionOrder() { + + } + + @Override + public Integer getID() { + return id; + } + + public Item getItem() { + return item; + } + + public void setItem(Item item) { + this.item = item; + } + + public Group getGroup() { + return group; + } + + public void setGroup(Group group) { + this.group = group; + } +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/SupervisionOrderServiceImpl.java b/dspace-api/src/main/java/org/dspace/supervision/SupervisionOrderServiceImpl.java new file mode 100644 index 000000000000..21a54f085f61 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/SupervisionOrderServiceImpl.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision; + +import java.sql.SQLException; +import java.util.List; + +import org.apache.commons.collections4.CollectionUtils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.event.Event; +import org.dspace.supervision.dao.SupervisionOrderDao; +import org.dspace.supervision.service.SupervisionOrderService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation of {@link SupervisionOrderService} + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class SupervisionOrderServiceImpl implements SupervisionOrderService { + + 
@Autowired(required = true) + private SupervisionOrderDao supervisionDao; + + @Autowired(required = true) + private GroupService groupService; + + @Autowired(required = true) + private ItemService itemService; + + protected SupervisionOrderServiceImpl() { + + } + + @Override + public SupervisionOrder create(Context context) throws SQLException, AuthorizeException { + return supervisionDao.create(context, new SupervisionOrder()); + } + + @Override + public SupervisionOrder find(Context context, int id) throws SQLException { + return supervisionDao.findByID(context, SupervisionOrder.class, id); + } + + @Override + public void update(Context context, SupervisionOrder supervisionOrder) + throws SQLException, AuthorizeException { + supervisionDao.save(context, supervisionOrder); + } + + @Override + public void update(Context context, List supervisionOrders) + throws SQLException, AuthorizeException { + if (CollectionUtils.isNotEmpty(supervisionOrders)) { + for (SupervisionOrder supervisionOrder : supervisionOrders) { + supervisionDao.save(context, supervisionOrder); + } + } + } + + @Override + public void delete(Context context, SupervisionOrder supervisionOrder) throws SQLException, AuthorizeException { + supervisionDao.delete(context, supervisionOrder); + } + + @Override + public SupervisionOrder create(Context context, Item item, Group group) throws SQLException { + SupervisionOrder supervisionOrder = new SupervisionOrder(); + supervisionOrder.setItem(item); + supervisionOrder.setGroup(group); + SupervisionOrder supOrder = supervisionDao.create(context, supervisionOrder); + context.addEvent(new Event(Event.MODIFY, Constants.ITEM, item.getID(), null, + itemService.getIdentifiers(context, item))); + return supOrder; + } + + @Override + public List findAll(Context context) throws SQLException { + return supervisionDao.findAll(context, SupervisionOrder.class); + } + + @Override + public List findByItem(Context context, Item item) throws SQLException { + return 
supervisionDao.findByItem(context, item); + } + + @Override + public SupervisionOrder findByItemAndGroup(Context context, Item item, Group group) throws SQLException { + return supervisionDao.findByItemAndGroup(context, item, group); + } + + @Override + public boolean isSupervisor(Context context, EPerson ePerson, Item item) throws SQLException { + List supervisionOrders = findByItem(context, item); + + if (CollectionUtils.isEmpty(supervisionOrders)) { + return false; + } + + return supervisionOrders + .stream() + .map(SupervisionOrder::getGroup) + .anyMatch(group -> isMember(context, ePerson, group)); + } + + private boolean isMember(Context context, EPerson ePerson, Group group) { + try { + return groupService.isMember(context, ePerson, group); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/dao/SupervisionOrderDao.java b/dspace-api/src/main/java/org/dspace/supervision/dao/SupervisionOrderDao.java new file mode 100644 index 000000000000..2dd5dad12a4d --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/dao/SupervisionOrderDao.java @@ -0,0 +1,50 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision.dao; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.GenericDAO; +import org.dspace.eperson.Group; +import org.dspace.supervision.SupervisionOrder; + +/** + * Database Access Object interface class for the SupervisionOrder object. 
+ * + * The implementation of this class is responsible for all database calls for the SupervisionOrder object + * and is autowired by spring + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public interface SupervisionOrderDao extends GenericDAO { + + /** + * find all Supervision Orders related to the item + * + * @param context The DSpace context + * @param item the item + * @return the Supervision Orders related to the item + * @throws SQLException If something goes wrong in the database + */ + List findByItem(Context context, Item item) throws SQLException; + + /** + * find the Supervision Order related to the item and group + * + * @param context The DSpace context + * @param item the item + * @param group the group + * @return the Supervision Order related to the item and group + * @throws SQLException If something goes wrong in the database + */ + SupervisionOrder findByItemAndGroup(Context context, Item item, Group group) throws SQLException; + +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/dao/impl/SupervisionOrderDaoImpl.java b/dspace-api/src/main/java/org/dspace/supervision/dao/impl/SupervisionOrderDaoImpl.java new file mode 100644 index 000000000000..09cd0841e78f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/dao/impl/SupervisionOrderDaoImpl.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision.dao.impl; + +import java.sql.SQLException; +import java.util.List; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Root; + +import org.dspace.content.Item; +import org.dspace.core.AbstractHibernateDAO; +import org.dspace.core.Context; +import org.dspace.eperson.Group; +import 
org.dspace.supervision.SupervisionOrder; +import org.dspace.supervision.SupervisionOrder_; +import org.dspace.supervision.dao.SupervisionOrderDao; + +/** + * Hibernate implementation of the Database Access Object interface class for the SupervisionOrder object. + * This class is responsible for all database calls for the SupervisionOrder object + * and is autowired by spring + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class SupervisionOrderDaoImpl extends AbstractHibernateDAO implements SupervisionOrderDao { + + @Override + public List findByItem(Context context, Item item) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SupervisionOrder.class); + + Root supervisionOrderRoot = criteriaQuery.from(SupervisionOrder.class); + criteriaQuery.select(supervisionOrderRoot); + criteriaQuery.where(criteriaBuilder.equal(supervisionOrderRoot.get(SupervisionOrder_.item), item)); + + return list(context, criteriaQuery, false, SupervisionOrder.class, -1, -1); + } + + @Override + public SupervisionOrder findByItemAndGroup(Context context, Item item, Group group) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, SupervisionOrder.class); + + Root supervisionOrderRoot = criteriaQuery.from(SupervisionOrder.class); + criteriaQuery.select(supervisionOrderRoot); + criteriaQuery.where(criteriaBuilder.and( + criteriaBuilder.equal(supervisionOrderRoot.get(SupervisionOrder_.item), item), + criteriaBuilder.equal(supervisionOrderRoot.get(SupervisionOrder_.group), group) + )); + + return singleResult(context, criteriaQuery); + } +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/enumeration/SupervisionOrderType.java b/dspace-api/src/main/java/org/dspace/supervision/enumeration/SupervisionOrderType.java new file mode 100644 index 
000000000000..4f6b888d6082 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/enumeration/SupervisionOrderType.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision.enumeration; + + +/** + * This Enum holds a representation of all the possible supervision order types + *

    + * OBSERVER: grant READ permission to the supervised item + * EDITOR: grant READ and WRITE permissions to the supervised item + * NONE: no grants + *

    + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public enum SupervisionOrderType { + OBSERVER, + NONE, + EDITOR; + + public static boolean invalid(String type) { + try { + SupervisionOrderType.valueOf(type); + return false; + } catch (IllegalArgumentException ignored) { + return true; + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/factory/SupervisionOrderServiceFactory.java b/dspace-api/src/main/java/org/dspace/supervision/factory/SupervisionOrderServiceFactory.java new file mode 100644 index 000000000000..8577ee8b1613 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/factory/SupervisionOrderServiceFactory.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision.factory; + +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.supervision.service.SupervisionOrderService; + +/** + * Abstract factory to get services for the supervision package, + * use SupervisionOrderServiceFactory.getInstance() to retrieve an implementation + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public abstract class SupervisionOrderServiceFactory { + + public abstract SupervisionOrderService getSupervisionOrderService(); + + public static SupervisionOrderServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName("supervisionOrderServiceFactory", + SupervisionOrderServiceFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/factory/SupervisionOrderServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/supervision/factory/SupervisionOrderServiceFactoryImpl.java new file mode 100644 index 000000000000..407a79c6899d --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/supervision/factory/SupervisionOrderServiceFactoryImpl.java @@ -0,0 +1,28 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision.factory; + +import org.dspace.supervision.service.SupervisionOrderService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for the supervision package, + * use SupervisionOrderServiceFactory.getInstance() to retrieve an implementation + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class SupervisionOrderServiceFactoryImpl extends SupervisionOrderServiceFactory { + + @Autowired(required = true) + private SupervisionOrderService supervisionOrderService; + + @Override + public SupervisionOrderService getSupervisionOrderService() { + return supervisionOrderService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/supervision/service/SupervisionOrderService.java b/dspace-api/src/main/java/org/dspace/supervision/service/SupervisionOrderService.java new file mode 100644 index 000000000000..0a3b6dae4b9c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/supervision/service/SupervisionOrderService.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.supervision.service; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.service.DSpaceCRUDService; +import org.dspace.supervision.SupervisionOrder; + +/** + * Service interface class 
for the SupervisionOrder object. + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public interface SupervisionOrderService extends DSpaceCRUDService { + + /** + * Creates a new SupervisionOrder + * + * @param context The DSpace context + * @param item the item + * @param group the group + * @return the created Supervision Order on item and group + * @throws SQLException If something goes wrong in the database + */ + SupervisionOrder create(Context context, Item item, Group group) throws SQLException; + + /** + * Find all supervision orders currently stored + * + * @param context The DSpace context + * @return all Supervision Orders + * @throws SQLException If something goes wrong in the database + */ + List findAll(Context context) throws SQLException; + + /** + * Find all supervision orders for a given Item + * + * @param context The DSpace context + * @param item the item + * @return all Supervision Orders related to the item + * @throws SQLException If something goes wrong in the database + */ + List findByItem(Context context, Item item) throws SQLException; + + /** + * + * Find a supervision order depending on given Item and Group + * + * @param context The DSpace context + * @param item the item + * @param group the group + * @return the Supervision Order of the item and group + * @throws SQLException If something goes wrong in the database + */ + SupervisionOrder findByItemAndGroup(Context context, Item item, Group group) throws SQLException; + + /** + * + * Checks if an EPerson is supervisor of an Item + * + * @param context The DSpace context + * @param ePerson the ePerson to be checked + * @param item the item + * @return true if the ePerson is a supervisor of the item + * @throws SQLException If something goes wrong in the database + */ + boolean isSupervisor(Context context, EPerson ePerson, Item item) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java 
b/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java index b7ded5ecbfc4..ec51528429a4 100644 --- a/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java +++ b/dspace-api/src/main/java/org/dspace/testing/PubMedToImport.java @@ -24,10 +24,10 @@ import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jdom.Document; -import org.jdom.Element; -import org.jdom.output.Format; -import org.jdom.output.XMLOutputter; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.output.Format; +import org.jdom2.output.XMLOutputter; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; diff --git a/dspace-api/src/main/java/org/dspace/text/filter/InitialArticleWord.java b/dspace-api/src/main/java/org/dspace/text/filter/InitialArticleWord.java index 700b25748e8c..167b201e0f7a 100644 --- a/dspace-api/src/main/java/org/dspace/text/filter/InitialArticleWord.java +++ b/dspace-api/src/main/java/org/dspace/text/filter/InitialArticleWord.java @@ -110,7 +110,7 @@ public String filter(String str, String lang) { return str.substring(cutPos); } else { // No - move the initial article word to the end - return new StringBuffer(str.substring(cutPos)) + return new StringBuilder(str.substring(cutPos)) .append(wordSeparator) .append(str.substring(initialStart, initialEnd)) .toString(); @@ -124,10 +124,12 @@ public String filter(String str, String lang) { } protected InitialArticleWord(boolean stripWord) { + this.wordSeparator = ", "; stripInitialArticle = stripWord; } protected InitialArticleWord() { + this.wordSeparator = ", "; stripInitialArticle = false; } @@ -138,9 +140,8 @@ protected InitialArticleWord() { * @return An array of definite/indefinite article words */ protected abstract String[] getArticleWords(String lang); - // Separator to use when appending article to end - private String wordSeparator = ", "; + private 
final String wordSeparator; // Flag to signify initial article word should be removed // If false, then the initial article word is appended to the end diff --git a/dspace-api/src/main/java/org/dspace/usage/LoggerUsageEventListener.java b/dspace-api/src/main/java/org/dspace/usage/LoggerUsageEventListener.java index aa24db077596..246239abbb65 100644 --- a/dspace-api/src/main/java/org/dspace/usage/LoggerUsageEventListener.java +++ b/dspace-api/src/main/java/org/dspace/usage/LoggerUsageEventListener.java @@ -11,7 +11,7 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.core.Constants; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.services.model.Event; import org.dspace.usage.UsageEvent.Action; @@ -33,7 +33,7 @@ public void receiveEvent(Event event) { if (event instanceof UsageEvent && !(event instanceof UsageSearchEvent)) { UsageEvent ue = (UsageEvent) event; - log.info(LogManager.getHeader( + log.info(LogHelper.getHeader( ue.getContext(), formatAction(ue.getAction(), ue.getObject()), formatMessage(ue.getObject())) diff --git a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java index ed137e9d6d8c..ec9a2b12641a 100644 --- a/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java +++ b/dspace-api/src/main/java/org/dspace/usage/UsageEvent.java @@ -65,6 +65,8 @@ String text() { private Action action; + private String referrer; + private static String checkParams(Action action, HttpServletRequest request, Context context, DSpaceObject object) { StringBuilder eventName = new StringBuilder(); if (action == null) { @@ -187,6 +189,12 @@ public UsageEvent(Action action, String ip, String userAgent, String xforwardedf this.object = object; } + public UsageEvent(Action action, HttpServletRequest request, Context context, DSpaceObject object, + String referrer) { + this(action, request, context, object); + setReferrer(referrer); 
+ } + public HttpServletRequest getRequest() { return request; @@ -240,4 +248,11 @@ public Action getAction() { return this.action; } + public String getReferrer() { + return referrer; + } + + public void setReferrer(String referrer) { + this.referrer = referrer; + } } diff --git a/dspace-api/src/main/java/org/dspace/util/ConsoleService.java b/dspace-api/src/main/java/org/dspace/util/ConsoleService.java new file mode 100644 index 000000000000..98dc88d546c8 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ConsoleService.java @@ -0,0 +1,17 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +/** + * Make System.console mock-able for testing. + * + * @author Mark H. Wood + */ +public interface ConsoleService { + public char[] readPassword(String prompt, Object... args); +} diff --git a/dspace-api/src/main/java/org/dspace/util/ConsoleServiceImpl.java b/dspace-api/src/main/java/org/dspace/util/ConsoleServiceImpl.java new file mode 100644 index 000000000000..a58a87f37be7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ConsoleServiceImpl.java @@ -0,0 +1,22 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.util; + +/** + * Standard implementation of console IO using {@code System.console()}. + * + * @author Mark H. Wood + */ +public class ConsoleServiceImpl + implements ConsoleService { + @Override + public char[] readPassword(String prompt, Object... 
args) { + return System.console().readPassword(prompt, args); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java index 7c3e13a28e13..9ff252e8ce3f 100644 --- a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java +++ b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java @@ -26,12 +26,15 @@ import java.util.TimeZone; import java.util.regex.Pattern; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** - * This class (Apache license) is copied from Apache Solr and add some tweaks to resolve unneeded dependency: - * https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr - * /util/DateMathParser.java + * This class (Apache license) is copied from Apache Solr, adding some tweaks to + * resolve an unneeded dependency. See + * the original. * + *

    * A Simple Utility class for parsing "math" like strings relating to Dates. * *

    @@ -78,7 +81,7 @@ * "setNow" in the interim). The default value of 'now' is * the time at the moment the DateMathParser instance is * constructed, unless overridden by the {@link CommonParams#NOW NOW} - * request param. + * request parameter. *

    * *

    @@ -88,7 +91,7 @@ * cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default * TimeZone used is UTC unless overridden by the * {@link CommonParams#TZ TZ} - * request param. + * request parameter. *

    * *

    @@ -102,6 +105,8 @@ */ public class DateMathParser { + private static final Logger LOG = LogManager.getLogger(); + public static final TimeZone UTC = TimeZone.getTimeZone("UTC"); /** @@ -119,12 +124,12 @@ public class DateMathParser { /** * A mapping from (uppercased) String labels identifying time units, - * to the corresponding {@link ChronoUnit} enum (e.g. "YEARS") used to + * to the corresponding {@link ChronoUnit} value (e.g. "YEARS") used to * set/add/roll that unit of measurement. * *

    * A single logical unit of time might be represented by multiple labels - * for convenience (ie: DATE==DAYS, + * for convenience (i.e. DATE==DAYS, * MILLI==MILLIS) *

    * @@ -220,6 +225,7 @@ private static LocalDateTime round(LocalDateTime t, String unit) { * * @param now an optional fixed date to use as "NOW" * @param val the string to parse + * @return result of applying the parsed expression to "NOW". * @throws Exception */ public static Date parseMath(Date now, String val) throws Exception { @@ -308,6 +314,7 @@ public TimeZone getTimeZone() { /** * Defines this instance's concept of "now". * + * @param n new value of "now". * @see #getNow */ public void setNow(Date n) { @@ -316,12 +323,12 @@ public void setNow(Date n) { /** * Returns a clone of this instance's concept of "now" (never null). - * * If setNow was never called (or if null was specified) then this method * first defines 'now' as the value dictated by the SolrRequestInfo if it * exists -- otherwise it uses a new Date instance at the moment getNow() * is first called. * + * @return "now". * @see #setNow * @see SolrRequestInfo#getNOW */ @@ -334,9 +341,12 @@ public Date getNow() { } /** - * Parses a string of commands relative "now" are returns the resulting Date. + * Parses a date expression relative to "now". * - * @throws ParseException positions in ParseExceptions are token positions, not character positions. + * @param math a date expression such as "+24MONTHS". + * @return the result of applying the expression to the current time. + * @throws ParseException positions in ParseExceptions are token positions, + * not character positions. 
*/ public Date parseMath(String math) throws ParseException { /* check for No-Op */ @@ -344,6 +354,8 @@ public Date parseMath(String math) throws ParseException { return getNow(); } + LOG.debug("parsing {}", math); + ZoneId zoneId = zone.toZoneId(); // localDateTime is a date and time local to the timezone specified LocalDateTime localDateTime = ZonedDateTime.ofInstant(getNow().toInstant(), zoneId).toLocalDateTime(); @@ -394,11 +406,44 @@ public Date parseMath(String math) throws ParseException { } } + LOG.debug("returning {}", localDateTime); return Date.from(ZonedDateTime.of(localDateTime, zoneId).toInstant()); } private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)"); + /** + * For manual testing. With one argument, test one-argument parseMath. + * With two (or more) arguments, test two-argument parseMath. + * + * @param argv date math expressions. + * @throws java.lang.Exception passed through. + */ + public static void main(String[] argv) + throws Exception { + DateMathParser parser = new DateMathParser(); + try { + Date parsed; + + if (argv.length <= 0) { + System.err.println("Date math expression(s) expected."); + } + + if (argv.length > 0) { + parsed = parser.parseMath(argv[0]); + System.out.format("Applied %s to implicit current time: %s%n", + argv[0], parsed.toString()); + } + + if (argv.length > 1) { + parsed = DateMathParser.parseMath(new Date(), argv[1]); + System.out.format("Applied %s to explicit current time: %s%n", + argv[1], parsed.toString()); + } + } catch (ParseException ex) { + System.err.format("Oops: %s%n", ex.getMessage()); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java new file mode 100644 index 000000000000..a50baf910e77 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/FrontendUrlService.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the 
LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.apache.commons.collections4.CollectionUtils.isNotEmpty; +import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.apache.commons.lang3.StringUtils.lowerCase; + +import java.util.List; +import java.util.Optional; + +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.services.ConfigurationService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Service class for generation of front-end urls. + */ +@Component +public class FrontendUrlService { + + private static final Logger log = LoggerFactory.getLogger(FrontendUrlService.class); + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private SearchService searchService; + + /** + * Generates front-end url for specified item. + * + * @param context context + * @param item item + * @return front-end url + */ + public String generateUrl(Context context, Item item) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return generateUrlWithSearchService(item, uiURL, context) + .orElseGet(() -> uiURL + "/items/" + item.getID()); + } + + /** + * Generates front-end url for specified bitstream. 
+ * + * @param bitstream bitstream + * @return front-end url + */ + public String generateUrl(Bitstream bitstream) { + String uiURL = configurationService.getProperty("dspace.ui.url"); + return uiURL + "/bitstreams/" + bitstream.getID() + "/download"; + } + + private Optional generateUrlWithSearchService(Item item, String uiURLStem, Context context) { + DiscoverQuery entityQuery = new DiscoverQuery(); + entityQuery.setQuery("search.uniqueid:\"Item-" + item.getID() + "\" and entityType:*"); + entityQuery.addSearchField("entityType"); + + try { + DiscoverResult discoverResult = searchService.search(context, entityQuery); + if (isNotEmpty(discoverResult.getIndexableObjects())) { + List entityTypes = discoverResult.getSearchDocument(discoverResult.getIndexableObjects() + .get(0)).get(0).getSearchFieldValues("entityType"); + if (isNotEmpty(entityTypes) && isNotBlank(entityTypes.get(0))) { + return Optional.of(uiURLStem + "/entities/" + lowerCase(entityTypes.get(0)) + "/" + item.getID()); + } + } + } catch (SearchServiceException e) { + log.error("Failed getting entitytype through solr for item " + item.getID() + ": " + e.getMessage()); + } + return Optional.empty(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java b/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java new file mode 100644 index 000000000000..422c2405a875 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/FunctionalUtils.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Objects; +import java.util.function.Predicate; +import java.util.function.Supplier; + +/** + * + * These methods are linked to the functional paradigm and use {@code Functional} interfaces of java 8+, all the main + * interfaces are in the package 
{@link java.util.function}. + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class FunctionalUtils { + + /** + * Private constructor, it's an Utils class with static methods / functions. + */ + private FunctionalUtils() { + } + + /** + * + * Tests that {@code defaultValue} isn't null. If this test is positive, then + * returns the {@code defaultValue}; Otherwise builds a new instance using the + * {@code builder} + * + * @param defaultValue default instance value + * @param builder instance generator + * @return corresponding non-null instance + */ + public static T getDefaultOrBuild(T defaultValue, Supplier builder) { + return getCheckDefaultOrBuild(Objects::nonNull, defaultValue, builder); + } + + /** + * Tests the {@code defaultValue} using the {@code defaultValueChecker}. If its + * test is positive, then returns the {@code defaultValue}; Otherwise builds a + * new instance using the {@code builder} + * + * @param defaultValueChecker checker that tests the defaultValue + * @param defaultValue default instance value + * @param builder supplier that generates a typed instance + * @return corresponding instance after check + */ + public static T getCheckDefaultOrBuild(Predicate defaultValueChecker, T defaultValue, Supplier builder) { + if (defaultValueChecker.test(defaultValue)) { + return defaultValue; + } + return builder.get(); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java new file mode 100644 index 000000000000..2b6f37beb2e1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/MultiFormatDateDeserializer.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + 
+import java.io.IOException; +import java.util.Date; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; + +/** + * This is a custom date deserializer for jackson that make use of our + * {@link MultiFormatDateParser} + * + * Dates are parsed as being in the UTC zone. + * + */ +public class MultiFormatDateDeserializer extends StdDeserializer { + + public MultiFormatDateDeserializer() { + this(null); + } + + public MultiFormatDateDeserializer(Class vc) { + super(vc); + } + + @Override + public Date deserialize(JsonParser jsonparser, DeserializationContext context) + throws IOException, JsonProcessingException { + String date = jsonparser.getText(); + return MultiFormatDateParser.parse(date); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/util/RawJsonDeserializer.java b/dspace-api/src/main/java/org/dspace/util/RawJsonDeserializer.java new file mode 100644 index 000000000000..baadf0d2834f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/RawJsonDeserializer.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.io.IOException; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * Extension of {@link JsonDeserializer} that convert a json to a String. 
+ * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public class RawJsonDeserializer extends JsonDeserializer { + + @Override + public String deserialize(JsonParser jp, DeserializationContext ctxt) + throws IOException, JsonProcessingException { + + ObjectMapper mapper = (ObjectMapper) jp.getCodec(); + JsonNode node = mapper.readTree(jp); + return mapper.writeValueAsString(node); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/util/SimpleMapConverter.java b/dspace-api/src/main/java/org/dspace/util/SimpleMapConverter.java new file mode 100644 index 000000000000..2b0d8d96ddec --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/SimpleMapConverter.java @@ -0,0 +1,107 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.io.File; +import java.io.FileInputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.springframework.util.Assert; + +/** + * Class that parse a properties file present in the crosswalks directory and + * allows to get its values given a key. + * + * @author Andrea Bollini + * @author Kostas Stamatis + * @author Luigi Andrea Pascarelli + * @author Panagiotis Koutsourakis + * @author Luca Giamminonni + */ +public class SimpleMapConverter { + + private String converterNameFile; // The properties filename + + private ConfigurationService configurationService; + + private Map mapping; + + private String defaultValue = ""; + + /** + * Parse the configured property file. 
+ */ + public void init() { + + Assert.notNull(converterNameFile, "No properties file name provided"); + Assert.notNull(configurationService, "No configuration service provided"); + + String mappingFile = configurationService.getProperty( + "dspace.dir") + File.separator + "config" + File.separator + "crosswalks" + File.separator + + converterNameFile; + + try (FileInputStream fis = new FileInputStream(new File(mappingFile))) { + + Properties mapConfig = new Properties(); + mapConfig.load(fis); + + this.mapping = parseProperties(mapConfig); + + } catch (Exception e) { + throw new IllegalArgumentException("An error occurs parsing " + mappingFile, e); + } + + } + + /** + * Returns the value related to the given key. If the given key is not found the + * incoming value is returned. + * + * @param key the key to search for a value + * @return the value + */ + public String getValue(String key) { + + String value = mapping.getOrDefault(key, defaultValue); + + if (StringUtils.isBlank(value)) { + return key; + } + + return value; + } + + private Map parseProperties(Properties properties) { + + Map mapping = new HashMap(); + + for (Object key : properties.keySet()) { + String keyString = (String) key; + mapping.put(keyString, properties.getProperty(keyString, "")); + } + + return mapping; + + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public void setConverterNameFile(String converterNameFile) { + this.converterNameFile = converterNameFile; + } + + public void setConfigurationService(ConfigurationService configurationService) { + this.configurationService = configurationService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java index 12a9970539a3..9342cb8b39e8 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java +++ 
b/dspace-api/src/main/java/org/dspace/util/SolrUpgradePre6xStatistics.java @@ -240,8 +240,8 @@ private String duration(long dur) { /** * Print a status message appended with the processing time for the operation * - * @param header - * Message to display + * @param numProcessed + * count of records processed so far. * @param fromStart * if true, report on processing time since the start of the program */ @@ -447,7 +447,7 @@ private void run() throws SolrServerException, SQLException, IOException { runReport(); logTime(false); for (int processed = updateRecords(MIGQUERY); (processed != 0) - && (numProcessed < numRec); processed = updateRecords(MIGQUERY)) { + && (numProcessed <= numRec); processed = updateRecords(MIGQUERY)) { printTime(numProcessed, false); batchUpdateStats(); if (context.getCacheSize() > CACHE_LIMIT) { @@ -696,4 +696,4 @@ private UUID mapOwner(String owntype, int val) throws SQLException { return null; } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/util/SolrUtils.java b/dspace-api/src/main/java/org/dspace/util/SolrUtils.java index f62feba29886..7b11d73834bb 100644 --- a/dspace-api/src/main/java/org/dspace/util/SolrUtils.java +++ b/dspace-api/src/main/java/org/dspace/util/SolrUtils.java @@ -35,6 +35,8 @@ private SolrUtils() { } * @return date formatter compatible with Solr. 
*/ public static DateFormat getDateFormatter() { - return new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); + DateFormat formatter = new SimpleDateFormat(SolrUtils.SOLR_DATE_FORMAT); + formatter.setTimeZone(SOLR_TIME_ZONE); + return formatter; } } diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java new file mode 100644 index 000000000000..e1502e89b514 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ThrowableUtils.java @@ -0,0 +1,43 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +/** + * Things you wish {@link Throwable} or some logging package would do for you. + * + * @author mwood + */ +public class ThrowableUtils { + /** + * Utility class: do not instantiate. + */ + private ThrowableUtils() { } + + /** + * Trace a chain of {@code Throwable}s showing only causes. + * Less voluminous than a stack trace. Useful if you just want to know + * what caused third-party code to return an uninformative exception + * message. + * + * @param throwable the exception or whatever. + * @return list of messages from each {@code Throwable} in the chain, + * separated by '\n'. 
+ */ + static public String formatCauseChain(Throwable throwable) { + StringBuilder trace = new StringBuilder(); + trace.append(throwable.getMessage()); + Throwable cause = throwable.getCause(); + while (null != cause) { + trace.append("\nCaused by: ") + .append(cause.getClass().getCanonicalName()).append(' ') + .append(cause.getMessage()); + cause = cause.getCause(); + } + return trace.toString(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/util/ThrowingSupplier.java b/dspace-api/src/main/java/org/dspace/util/ThrowingSupplier.java new file mode 100644 index 000000000000..a00e675f250e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/ThrowingSupplier.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +/** + * Functional interface that can be used to returns an object and potentially + * throws a Exception. + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@FunctionalInterface +public interface ThrowingSupplier { + + /** + * Returns an object. + * + * @return an object + * @throws E if some error occurs + */ + T get() throws E; +} diff --git a/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java new file mode 100644 index 000000000000..87d354a7f6c7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * Various manipulations of dates and times. 
+ * + * @author mwood + */ +public class TimeHelpers { + private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); + + /** + * Never instantiate this class. + */ + private TimeHelpers() {} + + /** + * Set a Date's time to midnight UTC. + * + * @param from some date-time. + * @return midnight UTC of the supplied date-time. + */ + public static Date toMidnightUTC(Date from) { + GregorianCalendar calendar = new GregorianCalendar(UTC); + calendar.setTime(from); + calendar.set(GregorianCalendar.HOUR_OF_DAY, 0); + calendar.set(GregorianCalendar.MINUTE, 0); + calendar.set(GregorianCalendar.SECOND, 0); + calendar.set(GregorianCalendar.MILLISECOND, 0); + return calendar.getTime(); + } +} diff --git a/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java index 8b0ca9aeb8d4..329332d31526 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java +++ b/dspace-api/src/main/java/org/dspace/versioning/AbstractVersionProvider.java @@ -21,6 +21,7 @@ import org.dspace.content.MetadataField; import org.dspace.content.MetadataSchema; import org.dspace.content.MetadataValue; +import org.dspace.content.RelationshipMetadataValue; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BundleService; import org.dspace.content.service.ItemService; @@ -55,14 +56,24 @@ protected void copyMetadata(Context context, Item itemNew, Item nativeItem) thro MetadataSchema metadataSchema = metadataField.getMetadataSchema(); String unqualifiedMetadataField = metadataSchema.getName() + "." + metadataField.getElement(); if (getIgnoredMetadataFields().contains(metadataField.toString('.')) || - getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." + Item.ANY)) { - //Skip this metadata field + getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." 
+ Item.ANY) || + aMd instanceof RelationshipMetadataValue) { + //Skip this metadata field (ignored and/or virtual) continue; } - itemService - .addMetadata(context, itemNew, metadataField, aMd.getLanguage(), aMd.getValue(), aMd.getAuthority(), - aMd.getConfidence()); + itemService.addMetadata( + context, + itemNew, + metadataField.getMetadataSchema().getName(), + metadataField.getElement(), + metadataField.getQualifier(), + aMd.getLanguage(), + aMd.getValue(), + aMd.getAuthority(), + aMd.getConfidence(), + aMd.getPlace() + ); } } diff --git a/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java index 7903a49c3148..d4590ae24ea2 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java +++ b/dspace-api/src/main/java/org/dspace/versioning/DefaultItemVersionProvider.java @@ -15,7 +15,9 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.content.Item; +import org.dspace.content.Relationship; import org.dspace.content.WorkspaceItem; +import org.dspace.content.service.RelationshipService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Context; import org.dspace.identifier.IdentifierException; @@ -44,6 +46,8 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen protected VersioningService versioningService; @Autowired(required = true) protected IdentifierService identifierService; + @Autowired(required = true) + protected RelationshipService relationshipService; @Override public Item createNewItemAndAddItInWorkspace(Context context, Item nativeItem) { @@ -89,10 +93,18 @@ public void deleteVersionedItem(Context c, Version versionToDelete, VersionHisto } } + /** + * Copy all data (minus a few exceptions) from the old item to the new item. + * @param c the DSpace context. 
+ * @param itemNew the new version of the item. + * @param previousItem the old version of the item. + * @return the new version of the item, with data from the old item. + */ @Override public Item updateItemState(Context c, Item itemNew, Item previousItem) { try { copyMetadata(c, itemNew, previousItem); + copyRelationships(c, itemNew, previousItem); createBundlesAndAddBitstreams(c, itemNew, previousItem); try { identifierService.reserve(c, itemNew); @@ -114,4 +126,49 @@ public Item updateItemState(Context c, Item itemNew, Item previousItem) { throw new RuntimeException(e.getMessage(), e); } } + + /** + * Copy all relationships of the old item to the new item. + * At this point in the lifecycle of the item-version (before archival), only the opposite item receives + * "latest" status. On item archival of the item-version, the "latest" status of the relevant relationships + * will be updated. + * @param context the DSpace context. + * @param newItem the new version of the item. + * @param oldItem the old version of the item. 
+ */ + protected void copyRelationships( + Context context, Item newItem, Item oldItem + ) throws SQLException, AuthorizeException { + List oldRelationships = relationshipService.findByItem(context, oldItem, -1, -1, false, true); + for (Relationship oldRelationship : oldRelationships) { + if (oldRelationship.getLeftItem().equals(oldItem)) { + // current item is on left side of this relationship + relationshipService.create( + context, + newItem, // new item + oldRelationship.getRightItem(), + oldRelationship.getRelationshipType(), + oldRelationship.getLeftPlace(), + oldRelationship.getRightPlace(), + oldRelationship.getLeftwardValue(), + oldRelationship.getRightwardValue(), + Relationship.LatestVersionStatus.RIGHT_ONLY // only mark the opposite side as "latest" for now + ); + } else if (oldRelationship.getRightItem().equals(oldItem)) { + // current item is on right side of this relationship + relationshipService.create( + context, + oldRelationship.getLeftItem(), + newItem, // new item + oldRelationship.getRelationshipType(), + oldRelationship.getLeftPlace(), + oldRelationship.getRightPlace(), + oldRelationship.getLeftwardValue(), + oldRelationship.getRightwardValue(), + Relationship.LatestVersionStatus.LEFT_ONLY // only mark the opposite side as "latest" for now + ); + } + } + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java b/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java index 83369e04650d..74014b62626d 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java +++ b/dspace-api/src/main/java/org/dspace/versioning/ItemVersionProvider.java @@ -22,5 +22,12 @@ public interface ItemVersionProvider { public void deleteVersionedItem(Context c, Version versionToDelete, VersionHistory history) throws SQLException; + /** + * Copy all data (minus a few exceptions) from the old item to the new item. + * @param c the DSpace context. + * @param itemNew the new version of the item. 
+ * @param previousItem the old version of the item. + * @return the new version of the item, with data from the old item. + */ public Item updateItemState(Context c, Item itemNew, Item previousItem); } diff --git a/dspace-api/src/main/java/org/dspace/versioning/Version.java b/dspace-api/src/main/java/org/dspace/versioning/Version.java index 2d4d359545b6..ee5c1c418338 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/Version.java +++ b/dspace-api/src/main/java/org/dspace/versioning/Version.java @@ -77,6 +77,7 @@ protected Version() { } + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java b/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java index 1acacc783876..231ccc29d973 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersionHistory.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Objects; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; @@ -20,11 +21,12 @@ import javax.persistence.SequenceGenerator; import javax.persistence.Table; -import org.apache.logging.log4j.Logger; +import org.apache.commons.collections4.CollectionUtils; import org.dspace.core.Context; import org.dspace.core.ReloadableEntity; import org.hibernate.proxy.HibernateProxyHelper; + /** * @author Fabio Bolognesi (fabio at atmire dot com) * @author Mark Diggory (markd at atmire dot com) @@ -35,8 +37,6 @@ @Table(name = "versionhistory") public class VersionHistory implements ReloadableEntity { - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(VersionHistory.class); - @Id @Column(name = "versionhistory_id") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "versionhistory_seq") @@ -56,6 +56,7 @@ protected VersionHistory() { } + @Override public Integer getID() { return id; } @@ 
-87,6 +88,18 @@ void removeVersion(Version version) { this.versions.remove(version); } + /** + * Verify if there is a version's item in submission. + * + * @return true if the last version in submission, otherwise false. + */ + public boolean hasDraftVersion() { + if (CollectionUtils.isNotEmpty(versions) && Objects.nonNull(versions.get(0).getItem())) { + return !versions.get(0).getItem().isArchived(); + } + return false; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -98,11 +111,7 @@ public boolean equals(Object o) { } final VersionHistory that = (VersionHistory) o; - if (!this.getID().equals(that.getID())) { - return false; - } - - return true; + return this.getID().equals(that.getID()); } @Override diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersionHistoryServiceImpl.java b/dspace-api/src/main/java/org/dspace/versioning/VersionHistoryServiceImpl.java index c7a2f9044c4a..96c39ac3a8e8 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersionHistoryServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersionHistoryServiceImpl.java @@ -11,11 +11,15 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; import org.apache.commons.collections4.CollectionUtils; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Item; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.services.ConfigurationService; import org.dspace.versioning.dao.VersionHistoryDAO; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; @@ -34,6 +38,12 @@ public class VersionHistoryServiceImpl implements VersionHistoryService { @Autowired(required = true) private VersioningService versioningService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private ConfigurationService 
configurationService; + protected VersionHistoryServiceImpl() { } @@ -210,4 +220,20 @@ public VersionHistory findByItem(Context context, Item item) throws SQLException return versionHistoryDAO.findByItem(context, item); } + @Override + public boolean canSeeDraftVersion(Context context, VersionHistory versionHistory) throws SQLException { + Version version = this.getLatestVersion(context, versionHistory); + if (Objects.nonNull(version)) { + EPerson submitter = version.getItem().getSubmitter(); + boolean isAdmin = authorizeService.isAdmin(context); + boolean canCreateVersion = configurationService + .getBooleanProperty("versioning.submitterCanCreateNewVersion"); + if (!isAdmin && !(canCreateVersion && Objects.equals(submitter, context.getCurrentUser()))) { + return false; + } + return true; + } + return false; + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java index 6683419844e1..63b5391d0a28 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersioningConsumer.java @@ -7,39 +7,66 @@ */ package org.dspace.versioning; +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES; + +import java.sql.SQLException; import java.util.HashSet; +import java.util.List; import java.util.Set; +import java.util.stream.Collectors; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.EntityType; import org.dspace.content.Item; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.EntityTypeService; import org.dspace.content.service.ItemService; +import 
org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.Constants; import org.dspace.core.Context; +import org.dspace.discovery.IndexEventConsumer; import org.dspace.event.Consumer; import org.dspace.event.Event; import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.service.VersionHistoryService; -import org.dspace.versioning.service.VersioningService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; +import org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog; /** + * When a new version of an item is published, unarchive the previous version and + * update {@link Relationship#latestVersionStatus} of the relevant relationships. + * * @author Fabio Bolognesi (fabio at atmire dot com) * @author Mark Diggory (markd at atmire dot com) * @author Ben Bosman (ben at atmire dot com) */ public class VersioningConsumer implements Consumer { - private static Set itemsToProcess; + private static final Logger log = LogManager.getLogger(VersioningConsumer.class); + + private Set itemsToProcess; private VersionHistoryService versionHistoryService; - private VersioningService versioningService; private ItemService itemService; - + private EntityTypeService entityTypeService; + private RelationshipTypeService relationshipTypeService; + private RelationshipService relationshipService; + private RelationshipVersioningUtils relationshipVersioningUtils; @Override public void initialize() throws Exception { versionHistoryService = VersionServiceFactory.getInstance().getVersionHistoryService(); - versioningService = VersionServiceFactory.getInstance().getVersionService(); itemService = ContentServiceFactory.getInstance().getItemService(); + entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService(); + relationshipService 
= ContentServiceFactory.getInstance().getRelationshipService(); + relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils(); } @Override @@ -49,35 +76,397 @@ public void finish(Context ctx) throws Exception { @Override public void consume(Context ctx, Event event) throws Exception { if (itemsToProcess == null) { - itemsToProcess = new HashSet(); - } - - int st = event.getSubjectType(); - int et = event.getEventType(); - - if (st == Constants.ITEM && et == Event.INSTALL) { - Item item = (Item) event.getSubject(ctx); - if (item != null && item.isArchived()) { - VersionHistory history = versionHistoryService.findByItem(ctx, item); - if (history != null) { - Version latest = versionHistoryService.getLatestVersion(ctx, history); - Version previous = versionHistoryService.getPrevious(ctx, history, latest); - if (previous != null) { - Item previousItem = previous.getItem(); - if (previousItem != null) { - previousItem.setArchived(false); - itemsToProcess.add(previousItem); - //Fire a new modify event for our previous item - //Due to the need to reindex the item in the search - //and browse index we need to fire a new event - ctx.addEvent(new Event(Event.MODIFY, - previousItem.getType(), previousItem.getID(), - null, itemService.getIdentifiers(ctx, previousItem))); - } - } + itemsToProcess = new HashSet<>(); + } + + // only items + if (event.getSubjectType() != Constants.ITEM) { + return; + } + + // only install events + if (event.getEventType() != Event.INSTALL) { + return; + } + + // get the item (should be archived) + Item item = (Item) event.getSubject(ctx); + if (item == null || !item.isArchived()) { + return; + } + + // get version history + VersionHistory history = versionHistoryService.findByItem(ctx, item); + if (history == null) { + return; + } + + // get latest version + Version latestVersion = versionHistoryService.getLatestVersion(ctx, history); + if (latestVersion == null) { + return; + } + + // get previous version + 
Version previousVersion = versionHistoryService.getPrevious(ctx, history, latestVersion); + if (previousVersion == null) { + return; + } + + // get latest item + Item latestItem = latestVersion.getItem(); + if (latestItem == null) { + String msg = String.format( + "Illegal state: Obtained version history of item with uuid %s, handle %s, but the latest item is null", + item.getID(), item.getHandle() + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + // get previous item + Item previousItem = previousVersion.getItem(); + if (previousItem == null) { + return; + } + + // unarchive previous item + unarchiveItem(ctx, previousItem); + + // update relationships + updateRelationships(ctx, latestItem, previousItem); + } + + protected void unarchiveItem(Context ctx, Item item) { + item.setArchived(false); + itemsToProcess.add(item); + //Fire a new modify event for our previous item + //Due to the need to reindex the item in the search + //and browse index we need to fire a new event + ctx.addEvent(new Event( + Event.MODIFY, item.getType(), item.getID(), null, itemService.getIdentifiers(ctx, item) + )); + } + + /** + * Update {@link Relationship#latestVersionStatus} of the relationships of both the old version and the new version + * of the item. + * + * This method will first locate all relationships that are eligible for an update, + * then it will try to match each of those relationships on the old version of given item + * with a relationship on the new version. + * + * One of the following scenarios will happen: + * - if a match is found, then the "latest" status on the side of given item is transferred from + * the old relationship to the new relationship. This implies that on the page of the third-party item, + * the old version of given item will NOT be shown anymore and the new version of given item will appear. + * Both versions of the given item still show the third-party item on their pages. 
+ * - if a relationship only exists on the new version of given item, then this method does nothing. + * The status of those relationships should already have been set to "latest" on both sides during relationship + * creation. + * - if a relationship only exists on the old version of given item, then we assume that the relationship is no + * longer relevant to / has been removed from the new version of the item. The "latest" status is removed from + * the side of the given item. This implies that on the page of the third-party item, + * the relationship with given item will no longer be listed. The old version of given item still lists + * the third-party item and the new version doesn't. + * @param ctx the DSpace context. + * @param latestItem the new version of the item. + * @param previousItem the old version of the item. + */ + protected void updateRelationships(Context ctx, Item latestItem, Item previousItem) { + // check that the entity types of both items match + if (!doEntityTypesMatch(latestItem, previousItem)) { + return; + } + + // get the entity type (same for both items) + EntityType entityType = getEntityType(ctx, latestItem); + if (entityType == null) { + return; + } + + // get all relationship types that are linked to the given entity type + List relationshipTypes = getRelationshipTypes(ctx, entityType); + if (CollectionUtils.isEmpty(relationshipTypes)) { + return; + } + + for (RelationshipType relationshipType : relationshipTypes) { + List latestItemRelationships = getAllRelationships(ctx, latestItem, relationshipType); + if (latestItemRelationships == null) { + continue; + } + + List previousItemRelationships = getAllRelationships(ctx, previousItem, relationshipType); + if (previousItemRelationships == null) { + continue; + } + + // NOTE: no need to loop through latestItemRelationships, because if no match can be found + // (meaning a relationship is only present on the new version of the item), then it's + // a newly added relationship and its 
status should have been set to BOTH during creation. + for (Relationship previousItemRelationship : previousItemRelationships) { + // determine on which side of the relationship the latest and previous item should be + boolean isLeft = previousItem.equals(previousItemRelationship.getLeftItem()); + boolean isRight = previousItem.equals(previousItemRelationship.getRightItem()); + if (isLeft == isRight) { + Item leftItem = previousItemRelationship.getLeftItem(); + Item rightItem = previousItemRelationship.getRightItem(); + String msg = String.format( + "Illegal state: could not determine side of item with uuid %s, handle %s in " + + "relationship with id %s, rightward name %s between " + + "left item with uuid %s, handle %s and right item with uuid %s, handle %s", + previousItem.getID(), previousItem.getHandle(), previousItemRelationship.getID(), + previousItemRelationship.getRelationshipType().getRightwardType(), + leftItem.getID(), leftItem.getHandle(), rightItem.getID(), rightItem.getHandle() + ); + log.error(msg); + throw new IllegalStateException(msg); } + + // get the matching relationship on the latest item + Relationship latestItemRelationship = + getMatchingRelationship(latestItem, isLeft, previousItemRelationship, latestItemRelationships); + + // the other side of the relationship should be "latest", otherwise the relationship could not have been + // copied to the new item in the first place (by DefaultVersionProvider#copyRelationships) + if (relationshipVersioningUtils.otherSideIsLatest( + isLeft, previousItemRelationship.getLatestVersionStatus() + )) { + // Set the previous version of the item to non-latest. This implies that the previous version + // of the item will not be shown anymore on the page of the third-party item. That makes sense, + // because either the relationship has been deleted from the new version of the item (no match), + // or the matching relationship (linked to new version) will receive "latest" status in + // the next step. 
+ LatestVersionStatusChangelog changelog = + relationshipVersioningUtils.updateLatestVersionStatus(previousItemRelationship, isLeft, false); + reindexRelationship(ctx, changelog, previousItemRelationship); + } + + if (latestItemRelationship != null) { + // Set the new version of the item to latest if the relevant relationship exists (match found). + // This implies that the new version of the item will appear on the page of the third-party item. + // The old version of the item will not appear anymore on the page of the third-party item, + // see previous step. + LatestVersionStatusChangelog changelog = + relationshipVersioningUtils.updateLatestVersionStatus(latestItemRelationship, isLeft, true); + reindexRelationship(ctx, changelog, latestItemRelationship); + } + } + } + } + + /** + * If the {@link Relationship#latestVersionStatus} of the relationship has changed, + * an "item modified" event should be fired for both the left and right item of the relationship. + * On one item the relation.* fields will change. On the other item the relation.*.latestForDiscovery will change. + * The event will cause the items to be re-indexed by the {@link IndexEventConsumer}. + * @param ctx the DSpace context. + * @param changelog indicates which side of the relationship has changed. + * @param relationship the relationship. 
+ */ + protected void reindexRelationship( + Context ctx, LatestVersionStatusChangelog changelog, Relationship relationship + ) { + if (changelog == NO_CHANGES) { + return; + } + + // on one item, relation.* fields will change + // on the other item, relation.*.latestForDiscovery will change + + // reindex left item + Item leftItem = relationship.getLeftItem(); + itemsToProcess.add(leftItem); + ctx.addEvent(new Event( + Event.MODIFY, leftItem.getType(), leftItem.getID(), null, itemService.getIdentifiers(ctx, leftItem) + )); + + // reindex right item + Item rightItem = relationship.getRightItem(); + itemsToProcess.add(rightItem); + ctx.addEvent(new Event( + Event.MODIFY, rightItem.getType(), rightItem.getID(), null, itemService.getIdentifiers(ctx, rightItem) + )); + } + + /** + * Given two items, check if their entity types match. + * If one or both items don't have an entity type, comparing is pointless and this method will return false. + * @param latestItem the item that represents the most recent version. + * @param previousItem the item that represents the second-most recent version. + * @return true if the entity types of both items are non-null and equal, false otherwise. 
+ */ + protected boolean doEntityTypesMatch(Item latestItem, Item previousItem) { + String latestItemEntityType = itemService.getEntityTypeLabel(latestItem); + String previousItemEntityType = itemService.getEntityTypeLabel(previousItem); + + // check if both items have an entity type + if (latestItemEntityType == null || previousItemEntityType == null) { + if (previousItemEntityType != null) { + log.warn( + "Inconsistency: Item with uuid {}, handle {} has NO entity type, " + + "but the previous version of that item with uuid {}, handle {} has entity type {}", + latestItem.getID(), latestItem.getHandle(), + previousItem.getID(), previousItem.getHandle(), previousItemEntityType + ); } + + // one or both items do not have an entity type, so comparing is pointless + return false; + } + + // check if the entity types are equal + if (!StringUtils.equals(latestItemEntityType, previousItemEntityType)) { + log.warn( + "Inconsistency: Item with uuid {}, handle {} has entity type {}, " + + "but the previous version of that item with uuid {}, handle {} has entity type {}", + latestItem.getID(), latestItem.getHandle(), latestItemEntityType, + previousItem.getID(), previousItem.getHandle(), previousItemEntityType + ); + return false; + } + + // success - the entity types of both items are non-null and equal + log.info( + "Item with uuid {}, handle {} and the previous version of that item with uuid {}, handle {} " + + "have the same entity type: {}", + latestItem.getID(), latestItem.getHandle(), previousItem.getID(), previousItem.getHandle(), + latestItemEntityType + ); + return true; + } + + /** + * Get the entity type (stored in metadata field dspace.entity.type) of any item. + * @param item the item. + * @return the entity type. 
+ */ + protected EntityType getEntityType(Context ctx, Item item) { + try { + return itemService.getEntityType(ctx, item); + } catch (SQLException e) { + log.error( + "Exception occurred when trying to obtain entity type with label {} of item with uuid {}, handle {}", + itemService.getEntityTypeLabel(item), item.getID(), item.getHandle(), e + ); + return null; + } + } + + /** + * Get all relationship types that have the given entity type on their left and/or right side. + * @param ctx the DSpace context. + * @param entityType the entity type for which all relationship types should be found. + * @return a list of relationship types (possibly empty), or null in case of error. + */ + protected List getRelationshipTypes(Context ctx, EntityType entityType) { + try { + return relationshipTypeService.findByEntityType(ctx, entityType); + } catch (SQLException e) { + log.error( + "Exception occurred when trying to obtain relationship types via entity type with id {}, label {}", + entityType.getID(), entityType.getLabel(), e + ); + return null; + } + } + + /** + * Get all relationships of the given type linked to the given item. + * @param ctx the DSpace context. + * @param item the item. + * @param relationshipType the relationship type. + * @return a list of relationships (possibly empty), or null in case of error. + */ + protected List getAllRelationships(Context ctx, Item item, RelationshipType relationshipType) { + try { + return relationshipService.findByItemAndRelationshipType(ctx, item, relationshipType, -1, -1, false); + } catch (SQLException e) { + log.error( + "Exception occurred when trying to obtain relationships of type with id {}, rightward name {} " + + "for item with uuid {}, handle {}", + relationshipType.getID(), relationshipType.getRightwardType(), item.getID(), item.getHandle(), e + ); + return null; + } + } + + /** + * From a list of relationships, find the relationship with the correct relationship type and items. 
+ * If isLeft is true, the provided item should be on the left side of the relationship. + * If isLeft is false, the provided item should be on the right side of the relationship. + * In both cases, the other item is taken from the given relationship. + * @param latestItem the item that should either be on the left or right side of the returned relationship (if any). + * @param isLeft decide on which side of the relationship the provided item should be. + * @param previousItemRelationship the relationship from which the type and the other item are read. + * @param relationships the list of relationships that we'll search through. + * @return the relationship that satisfies the requirements (can only be one or zero). + */ + protected Relationship getMatchingRelationship( + Item latestItem, boolean isLeft, Relationship previousItemRelationship, List relationships + ) { + Item leftItem = previousItemRelationship.getLeftItem(); + RelationshipType relationshipType = previousItemRelationship.getRelationshipType(); + Item rightItem = previousItemRelationship.getRightItem(); + + if (isLeft) { + return getMatchingRelationship(latestItem, relationshipType, rightItem, relationships); + } else { + return getMatchingRelationship(leftItem, relationshipType, latestItem, relationships); + } + } + + + /** + * Find the relationship with the given left item, relation type and right item, from a list of relationships. + * @param expectedLeftItem the relationship that we're looking for has this item on the left side. + * @param expectedRelationshipType the relationship that we're looking for has this relationship type. + * @param expectedRightItem the relationship that we're looking for has this item on the right side. + * @param relationships the list of relationships that we'll search through. + * @return the relationship that satisfies the requirements (can only be one or zero). 
+ */ + protected Relationship getMatchingRelationship( + Item expectedLeftItem, RelationshipType expectedRelationshipType, Item expectedRightItem, + List relationships + ) { + Integer expectedRelationshipTypeId = expectedRelationshipType.getID(); + + List matchingRelationships = relationships.stream() + .filter(relationship -> { + int relationshipTypeId = relationship.getRelationshipType().getID(); + + boolean leftItemMatches = expectedLeftItem.equals(relationship.getLeftItem()); + boolean relationshipTypeMatches = expectedRelationshipTypeId == relationshipTypeId; + boolean rightItemMatches = expectedRightItem.equals(relationship.getRightItem()); + + return leftItemMatches && relationshipTypeMatches && rightItemMatches; + }) + .distinct() + .collect(Collectors.toUnmodifiableList()); + + if (matchingRelationships.isEmpty()) { + return null; } + + // NOTE: this situation should never occur because the relationship table has a unique constraint + // over the "left_id", "type_id" and "right_id" columns + if (matchingRelationships.size() > 1) { + String msg = String.format( + "Illegal state: expected 0 or 1 relationship, but found %s relationships (ids: %s) " + + "of type with id %s, rightward name %s " + + "between left item with uuid %s, handle %s and right item with uuid %s, handle %s", + matchingRelationships.size(), + matchingRelationships.stream().map(Relationship::getID).collect(Collectors.toUnmodifiableList()), + expectedRelationshipTypeId, expectedRelationshipType.getRightwardType(), + expectedLeftItem.getID(), expectedLeftItem.getHandle(), + expectedRightItem.getID(), expectedRightItem.getHandle() + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + return matchingRelationships.get(0); } @Override diff --git a/dspace-api/src/main/java/org/dspace/versioning/VersioningServiceImpl.java b/dspace-api/src/main/java/org/dspace/versioning/VersioningServiceImpl.java index ee6adf909857..ece536e81b26 100644 --- 
a/dspace-api/src/main/java/org/dspace/versioning/VersioningServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/versioning/VersioningServiceImpl.java @@ -10,6 +10,7 @@ import java.sql.SQLException; import java.util.Date; import java.util.List; +import java.util.Objects; import org.dspace.content.DCDate; import org.dspace.content.Item; @@ -96,7 +97,7 @@ public Version createNewVersion(Context c, Item item, String summary) { } @Override - public void removeVersion(Context c, Version version) throws SQLException { + public void delete(Context c, Version version) throws SQLException { try { // we will first delete the version and then the item // after deletion of the version we cannot find the item anymore @@ -158,7 +159,7 @@ public void removeVersion(Context c, Version version) throws SQLException { public void removeVersion(Context c, Item item) throws SQLException { Version version = versionDAO.findByItem(c, item); if (version != null) { - removeVersion(c, version); + delete(c, version); } } @@ -196,8 +197,11 @@ public Version createNewVersion(Context context, VersionHistory history, Item it int versionNumber) { try { Version version = versionDAO.create(context, new Version()); - - version.setVersionNumber(getNextVersionNumer(context, history)); + if (versionNumber > 0 && !isVersionExist(context, item, versionNumber)) { + version.setVersionNumber(versionNumber); + } else { + version.setVersionNumber(getNextVersionNumer(context, history)); + } version.setVersionDate(date); version.setePerson(item.getSubmitter()); version.setItem(item); @@ -211,12 +215,27 @@ public Version createNewVersion(Context context, VersionHistory history, Item it } } + private boolean isVersionExist(Context context, Item item, int versionNumber) throws SQLException { + VersionHistory history = versionHistoryService.findByItem(context, item); + if (Objects.isNull(history)) { + return false; + } + return history.getVersions().stream().filter(v -> v.getVersionNumber() == versionNumber) + 
.findFirst() + .isPresent(); + } + @Override public List getVersionsByHistory(Context c, VersionHistory vh) throws SQLException { - List versions = versionDAO.findVersionsWithItems(c, vh); + List versions = versionDAO.findVersionsWithItems(c, vh, -1, -1); return versions; } + @Override + public List getVersionsByHistoryWithItems(Context c, VersionHistory vh, int offset, int limit) + throws SQLException { + return versionDAO.findVersionsWithItems(c, vh, offset, limit); + } // **** PROTECTED METHODS!! @@ -236,4 +255,15 @@ protected int getNextVersionNumer(Context c, VersionHistory vh) throws SQLExcept return next; } + + @Override + public void update(Context context, Version version) throws SQLException { + versionDAO.save(context, version); + } + + @Override + public int countVersionsByHistoryWithItem(Context context, VersionHistory versionHistory) throws SQLException { + return versionDAO.countVersionsByHistoryWithItem(context, versionHistory); + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/dao/VersionDAO.java b/dspace-api/src/main/java/org/dspace/versioning/dao/VersionDAO.java index 52bcb978f169..6730ece3f516 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/dao/VersionDAO.java +++ b/dspace-api/src/main/java/org/dspace/versioning/dao/VersionDAO.java @@ -36,12 +36,28 @@ public interface VersionDAO extends GenericDAO { * * @param context The relevant DSpace Context. * @param versionHistory version history + * @param offset the position of the first result to return + * @param limit paging limit * @return all versions of an version history that have items assigned. * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ - public List findVersionsWithItems(Context context, VersionHistory versionHistory) + public List findVersionsWithItems(Context context, VersionHistory versionHistory, int offset, int limit) throws SQLException; public int getNextVersionNumber(Context c, VersionHistory vh) throws SQLException; + /** + * This method count versions of an version history that have items + * assigned. We do not delete versions to keep version numbers stable. To + * remove a version we set the item, date, summary and eperson null. This + * method returns only versions that aren't soft deleted and have items + * assigned. + * + * @param context The relevant DSpace Context. + * @param versionHistory Version history + * @return Total versions of an version history that have items assigned. + * @throws SQLException If database error + */ + public int countVersionsByHistoryWithItem(Context context, VersionHistory versionHistory) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/dao/impl/VersionDAOImpl.java b/dspace-api/src/main/java/org/dspace/versioning/dao/impl/VersionDAOImpl.java index 6633c892eae4..0e28e72d0752 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/dao/impl/VersionDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/versioning/dao/impl/VersionDAOImpl.java @@ -61,7 +61,7 @@ public int getNextVersionNumber(Context c, VersionHistory vh) throws SQLExceptio } @Override - public List findVersionsWithItems(Context context, VersionHistory versionHistory) + public List findVersionsWithItems(Context context, VersionHistory versionHistory, int offset, int limit) throws SQLException { CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); @@ -78,6 +78,16 @@ public List findVersionsWithItems(Context context, VersionHistory versi orderList.add(criteriaBuilder.desc(versionRoot.get(Version_.versionNumber))); criteriaQuery.orderBy(orderList); - return list(context, criteriaQuery, false, Version.class, -1, -1); + return list(context, 
criteriaQuery, false, Version.class, limit, offset); } + + @Override + public int countVersionsByHistoryWithItem(Context context, VersionHistory versionHistory) throws SQLException { + Query query = createQuery(context, "SELECT count(*) FROM " + Version.class.getSimpleName() + + " WHERE versionhistory_id = (:versionhistoryId)" + + " AND item_id IS NOT NULL"); + query.setParameter("versionhistoryId", versionHistory); + return count(query); + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java index ecc3315a727d..8e8cc786ca46 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java +++ b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactory.java @@ -10,6 +10,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; /** * Abstract factory to get services for the versioning package, use VersionServiceFactory.getInstance() to retrieve @@ -23,6 +24,8 @@ public abstract class VersionServiceFactory { public abstract VersioningService getVersionService(); + public abstract RelationshipVersioningUtils getRelationshipVersioningUtils(); + public static VersionServiceFactory getInstance() { return DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName("versionServiceFactory", VersionServiceFactory.class); diff --git a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java index 613cb4faf413..97e4083426ad 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/versioning/factory/VersionServiceFactoryImpl.java @@ -9,6 +9,7 @@ import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; import org.springframework.beans.factory.annotation.Autowired; /** @@ -25,6 +26,9 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory { @Autowired(required = true) protected VersioningService versionService; + @Autowired(required = true) + protected RelationshipVersioningUtils relationshipVersioningUtils; + @Override public VersionHistoryService getVersionHistoryService() { return versionHistoryService; @@ -34,4 +38,10 @@ public VersionHistoryService getVersionHistoryService() { public VersioningService getVersionService() { return versionService; } + + @Override + public RelationshipVersioningUtils getRelationshipVersioningUtils() { + return relationshipVersioningUtils; + } + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/service/VersionHistoryService.java b/dspace-api/src/main/java/org/dspace/versioning/service/VersionHistoryService.java index e7c88879a544..e4f4aa2e33dd 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/service/VersionHistoryService.java +++ b/dspace-api/src/main/java/org/dspace/versioning/service/VersionHistoryService.java @@ -67,4 +67,16 @@ public boolean isLastVersion(Context context, VersionHistory versionHistory, Ver public void remove(VersionHistory versionHistory, Version version); + /** + * This method has a scope to verify if the logged user has permission + * to see the attribute 'draftVersion' of the latest version. 
+ * + * @param context DSpace context object + * @param versionHistory Version history object + * @return return true if the logged user has permission to see + * the attribute 'draftVersion' of the latest version, otherwise false + * @throws SQLException If database error + */ + public boolean canSeeDraftVersion(Context context, VersionHistory versionHistory) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/service/VersioningService.java b/dspace-api/src/main/java/org/dspace/versioning/service/VersioningService.java index 56d52e395328..2f6df5b732f7 100644 --- a/dspace-api/src/main/java/org/dspace/versioning/service/VersioningService.java +++ b/dspace-api/src/main/java/org/dspace/versioning/service/VersioningService.java @@ -40,7 +40,29 @@ public interface VersioningService { */ List getVersionsByHistory(Context c, VersionHistory vh) throws SQLException; - void removeVersion(Context c, Version version) throws SQLException; + /** + * Return a paginated list of versions of a version history. + * To keep version numbers stable we do not delete versions, we do only set + * the item, date, summary and eperson null. This methods returns only those + * versions that have an item assigned. + * + * @param c The relevant DSpace Context. 
+ * @param vh Version history + * @param offset The position of the first result to return + * @param limit Paging limit + * @throws SQLException If database error + */ + List getVersionsByHistoryWithItems(Context c, VersionHistory vh, int offset, int limit) + throws SQLException; + + /** + * Delete a Version + * + * @param context context + * @param version version + * @throws SQLException if database error + */ + public void delete(Context context, Version version) throws SQLException; void removeVersion(Context c, Item item) throws SQLException; @@ -56,4 +78,28 @@ public interface VersioningService { Version createNewVersion(Context context, VersionHistory history, Item item, String summary, Date date, int versionNumber); + + /** + * Update the Version + * + * @param context context + * @param version version + * @throws SQLException if database error + */ + public void update(Context context, Version version) throws SQLException; + + /** + * This method count versions of an version history that have items + * assigned. We do not delete versions to keep version numbers stable. To + * remove a version we set the item, date, summary and eperson null. This + * method returns only versions that aren't soft deleted and have items + * assigned. + * + * @param context The relevant DSpace Context. + * @param versionHistory Version history + * @return Total versions of an version history that have items assigned. 
+ * @throws SQLException If database error + */ + public int countVersionsByHistoryWithItem(Context context, VersionHistory versionHistory) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/versioning/utils/RelationshipVersioningUtils.java b/dspace-api/src/main/java/org/dspace/versioning/utils/RelationshipVersioningUtils.java new file mode 100644 index 000000000000..5e401760825f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/versioning/utils/RelationshipVersioningUtils.java @@ -0,0 +1,114 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.versioning.utils; + +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.LEFT_SIDE_CHANGED; +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES; +import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.RIGHT_SIDE_CHANGED; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Relationship; +import org.dspace.content.Relationship.LatestVersionStatus; + +/** + * Class with utility methods to manipulate relationships that are linked to versioned items. + * Specifically focussed on the "latest version status" of relationships, + * which controls which related items are relevant (visible) to any given item. + */ +public class RelationshipVersioningUtils { + + private static final Logger log = LogManager.getLogger(RelationshipVersioningUtils.class); + + /** + * Given a latest version status, check if the other side is "latest". + * If we look from the left, this implies BOTH and RIGHT_ONLY return true. + * If we look from the right, this implies BOTH and LEFT_ONLY return true. 
+ * @param isLeft whether we should look from the left or right side. + * @param latestVersionStatus the latest version status. + * @return true if the other side has "latest" status, false otherwise. + */ + public boolean otherSideIsLatest(boolean isLeft, LatestVersionStatus latestVersionStatus) { + if (latestVersionStatus == LatestVersionStatus.BOTH) { + return true; + } + + return latestVersionStatus == (isLeft ? LatestVersionStatus.RIGHT_ONLY : LatestVersionStatus.LEFT_ONLY); + } + + public enum LatestVersionStatusChangelog { + NO_CHANGES, + LEFT_SIDE_CHANGED, + RIGHT_SIDE_CHANGED + } + + /** + * Update {@link Relationship#latestVersionStatus} of the given relationship. + * If isLatest = true, this method will never throw IllegalStateException. + * If isLatest = false, you should make sure that the selected side of given relationship + * currently has "latest" status, otherwise IllegalStateException will be thrown. + * @param relationship the relationship. + * @param updateLeftSide whether the status of the left item or the right item should be updated. + * @param isLatest to what the status should be set. + * @throws IllegalStateException if the operation would result in both the left side and the right side + * being set to non-latest. 
+ */ + public LatestVersionStatusChangelog updateLatestVersionStatus( + Relationship relationship, boolean updateLeftSide, boolean isLatest + ) throws IllegalStateException { + LatestVersionStatus lvs = relationship.getLatestVersionStatus(); + + boolean leftSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.LEFT_ONLY; + boolean rightSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.RIGHT_ONLY; + + if (updateLeftSide) { + if (leftSideIsLatest == isLatest) { + return NO_CHANGES; // no change needed + } + leftSideIsLatest = isLatest; + } else { + if (rightSideIsLatest == isLatest) { + return NO_CHANGES; // no change needed + } + rightSideIsLatest = isLatest; + } + + LatestVersionStatus newVersionStatus; + if (leftSideIsLatest && rightSideIsLatest) { + newVersionStatus = LatestVersionStatus.BOTH; + } else if (leftSideIsLatest) { + newVersionStatus = LatestVersionStatus.LEFT_ONLY; + } else if (rightSideIsLatest) { + newVersionStatus = LatestVersionStatus.RIGHT_ONLY; + } else { + String msg = String.format( + "Illegal state: cannot set %s item to latest = false, because relationship with id %s, " + + "rightward name %s between left item with uuid %s, handle %s and right item with uuid %s, handle %s " + + "has latest version status set to %s", + updateLeftSide ? 
"left" : "right", relationship.getID(), + relationship.getRelationshipType().getRightwardType(), + relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(), + relationship.getRightItem().getID(), relationship.getRightItem().getHandle(), lvs + ); + log.error(msg); + throw new IllegalStateException(msg); + } + + log.info( + "set latest version status from {} to {} for relationship with id {}, rightward name {} " + + "between left item with uuid {}, handle {} and right item with uuid {}, handle {}", + lvs, newVersionStatus, relationship.getID(), relationship.getRelationshipType().getRightwardType(), + relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(), + relationship.getRightItem().getID(), relationship.getRightItem().getHandle() + ); + relationship.setLatestVersionStatus(newVersionStatus); + + return updateLeftSide ? LEFT_SIDE_CHANGED : RIGHT_SIDE_CHANGED; + } + +} diff --git a/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java b/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java index ee1b0445bb6c..7f2bdc6ef771 100644 --- a/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java +++ b/dspace-api/src/main/java/org/dspace/vocabulary/ControlledVocabulary.java @@ -15,8 +15,11 @@ import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; -import org.apache.xpath.XPathAPI; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -56,7 +59,7 @@ public ControlledVocabulary(String id, String label, String value, List subVocabularies = new ArrayList<>(subNodes.getLength()); for (int i = 0; i < subNodes.getLength(); i++) { diff --git 
a/dspace-api/src/main/java/org/dspace/web/ContextUtil.java b/dspace-api/src/main/java/org/dspace/web/ContextUtil.java new file mode 100644 index 000000000000..4bdf26c05381 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/web/ContextUtil.java @@ -0,0 +1,210 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.web; + +import java.sql.SQLException; +import java.util.Enumeration; +import java.util.Locale; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; +import org.dspace.utils.DSpace; + +/** + * Miscellaneous UI utility methods methods for managing DSpace context. + * + * This class was "adapted" from the class of the same name in old XMLUI. + * + * @author Tim Donohue + */ +public class ContextUtil { + /** + * The log4j logger + */ + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ContextUtil.class); + + /** + * Where the context is stored on an HTTP Request object + */ + public static final String DSPACE_CONTEXT = "dspace.context"; + + /** + * Default constructor + */ + private ContextUtil() { } + + /** + * Inspection method to check if a DSpace context has been created for this request. + * + * @param request the servlet request object + * @return True if a context has previously been created, false otherwise. 
+ */ + public static boolean isContextAvailable(ServletRequest request) { + Object object = request.getAttribute(DSPACE_CONTEXT); + + if (object instanceof Context) { + return true; + } else { + return false; + } + } + + /** + * Obtain a new context object. If a context object has already been created + * for this HTTP request, it is re-used, otherwise it is created. + * + * @param request the servlet request object + * @return a context object + */ + public static Context obtainContext(HttpServletRequest request) { + Context context = (Context) request.getAttribute(DSPACE_CONTEXT); + + if (context == null) { + try { + context = ContextUtil.initializeContext(); + } catch (SQLException e) { + log.error("Unable to initialize context", e); + return null; + } + + // Store the context in the request + request.setAttribute(DSPACE_CONTEXT, context); + } + // this need to be verified each time that the context is extracted from the request + // as some call happen before that the login process is completed and user settings can + // change the locale + Locale currentLocale = getLocale(context, request); + context.setCurrentLocale(currentLocale); + return context; + } + + /** + * Shortcut for {@link #obtainContext(Request)} using the {@link RequestService} + * to retrieve the current thread request + * + * @return the DSpace Context associated with the current thread-bound request + */ + public static Context obtainCurrentRequestContext() { + Context context = null; + RequestService requestService = new DSpace().getRequestService(); + Request currentRequest = requestService.getCurrentRequest(); + if (currentRequest != null) { + context = ContextUtil.obtainContext(currentRequest.getHttpServletRequest()); + } + return context; + } + + private static Locale getLocale(Context context, HttpServletRequest request) { + Locale userLocale = null; + Locale supportedLocale = null; + + // Locales requested from client + String locale = request.getHeader("Accept-Language"); + if 
(StringUtils.isNotBlank(locale)) { + Enumeration locales = request.getLocales(); + if (locales != null) { + while (locales.hasMoreElements()) { + Locale current = locales.nextElement(); + if (I18nUtil.isSupportedLocale(current)) { + userLocale = current; + break; + } + } + } + } + if (userLocale == null && context.getCurrentUser() != null) { + String userLanguage = context.getCurrentUser().getLanguage(); + if (userLanguage != null) { + userLocale = new Locale(userLanguage); + } + } + if (userLocale == null) { + return I18nUtil.getDefaultLocale(); + } + supportedLocale = I18nUtil.getSupportedLocale(userLocale); + return supportedLocale; + } + + /** + * Initialize a new Context object + * + * @return a DSpace Context Object + * @throws SQLException + */ + private static Context initializeContext() throws SQLException { + // Create a new Context + Context context = new Context(); + // Set the session ID + /**context.setExtraLogInfo("session_id=" + + request.getSession().getId()); + + AuthenticationUtil.resumeLogin(context, request); + + // Set any special groups - invoke the authentication mgr. 
+ int[] groupIDs = AuthenticationManager.getSpecialGroups(context, request); + + for (int i = 0; i < groupIDs.length; i++) + { + context.setSpecialGroup(groupIDs[i]); + log.debug("Adding Special Group id="+String.valueOf(groupIDs[i])); + } + + // Set the session ID and IP address + String ip = request.getRemoteAddr(); + if (useProxies == null) { + useProxies = ConfigurationManager.getBooleanProperty("useProxies", false); + } + if(useProxies && request.getHeader("X-Forwarded-For") != null) + { + // This header is a comma delimited list + for(String xfip : request.getHeader("X-Forwarded-For").split(",")) + { + if(!request.getHeader("X-Forwarded-For").contains(ip)) + { + ip = xfip.trim(); + } + } + } + context.setExtraLogInfo("session_id=" + request.getSession().getId() + ":ip_addr=" + ip); + */ + + return context; + } + + /** + * Check if a context exists for this request, if so complete the context. + * + * @param request The request object + */ + public static void completeContext(ServletRequest request) throws ServletException { + Context context = (Context) request.getAttribute(DSPACE_CONTEXT); + + if (context != null && context.isValid()) { + try { + context.complete(); + } catch (SQLException e) { + throw new ServletException(e); + } + } + } + + public static void abortContext(ServletRequest request) { + Context context = (Context) request.getAttribute(DSPACE_CONTEXT); + + if (context != null && context.isValid()) { + context.abort(); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/workflow/WorkflowException.java b/dspace-api/src/main/java/org/dspace/workflow/WorkflowException.java index f05ab8048682..3de660f279dd 100644 --- a/dspace-api/src/main/java/org/dspace/workflow/WorkflowException.java +++ b/dspace-api/src/main/java/org/dspace/workflow/WorkflowException.java @@ -27,7 +27,8 @@ public WorkflowException(String reason) { this.reason = reason; } - public String toString() { + @Override + public String getMessage() { return reason; } } diff --git 
a/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java b/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java index 716b6cabd354..613c5821bcd1 100644 --- a/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java +++ b/dspace-api/src/main/java/org/dspace/workflow/WorkflowService.java @@ -18,6 +18,7 @@ import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.xmlworkflow.WorkflowConfigurationException; +import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; /** * Service interface class for the WorkflowService framework. @@ -100,6 +101,9 @@ public WorkspaceItem sendWorkflowItemBackSubmission(Context c, T workflowItem, E String rejection_message) throws SQLException, AuthorizeException, IOException; + public void restartWorkflow(Context context, XmlWorkflowItem wi, EPerson decliner, String provenance) + throws SQLException, AuthorizeException, IOException, WorkflowException; + public String getMyDSpaceLink(); public void deleteCollection(Context context, Collection collection) diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/Role.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/Role.java index bfc5654cdd20..5b5ba5c1d3ba 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/Role.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/Role.java @@ -41,6 +41,9 @@ public class Role implements BeanNameAware { @Autowired private WorkflowItemRoleService workflowItemRoleService; + // Whether or not to delete temporary group made attached to the WorkflowItemRole for this role in AutoAssignAction + private boolean deleteTemporaryGroup = false; + private String id; private String name; private String description; @@ -153,4 +156,17 @@ public void setScope(Scope scope) { public void setInternal(boolean internal) { isInternal = internal; } + + public boolean isDeleteTemporaryGroup() { + return deleteTemporaryGroup; + } + + /** + * Setter for config that indicated whether or not to 
delete temporary group made attached to the + * WorkflowItemRole for this role in AutoAssignAction + * @param deleteTemporaryGroup + */ + public void setDeleteTemporaryGroup(boolean deleteTemporaryGroup) { + this.deleteTemporaryGroup = deleteTemporaryGroup; + } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/RoleMembers.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/RoleMembers.java index 39e3110d6310..869f7444406f 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/RoleMembers.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/RoleMembers.java @@ -30,8 +30,8 @@ public class RoleMembers { protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); - private ArrayList groups; - private ArrayList epersons; + private final ArrayList groups; + private final ArrayList epersons; public RoleMembers() { this.groups = new ArrayList<>(); @@ -55,11 +55,7 @@ public void addEPerson(EPerson eperson) { } public void removeEperson(EPerson epersonToRemove) { - for (EPerson eperson : epersons) { - if (eperson.equals(epersonToRemove)) { - epersons.remove(eperson); - } - } + epersons.removeIf(eperson -> eperson.equals(epersonToRemove)); } public ArrayList getAllUniqueMembers(Context context) throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowConfigurationException.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowConfigurationException.java index 5c2e1af487fd..e4e3ef759ee7 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowConfigurationException.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowConfigurationException.java @@ -8,7 +8,7 @@ package org.dspace.xmlworkflow; /** - * Exception for problems with the configuration xml + * Exception for problems with the configuration XML. 
* * @author Bram De Schouwer (bram.deschouwer at dot com) * @author Kevin Van de Velde (kevin at atmire dot com) @@ -17,13 +17,14 @@ */ public class WorkflowConfigurationException extends Exception { - private String error; + private final String error; public WorkflowConfigurationException(String error) { this.error = error; } - public String toString() { + @Override + public String getMessage() { return this.error; } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java index c651097fcbb9..aecdccd55af3 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/WorkflowRequirementsServiceImpl.java @@ -100,7 +100,7 @@ public void removeClaimedUser(Context context, XmlWorkflowItem wfi, EPerson user //Then remove the current user from the inProgressUsers inProgressUserService.delete(context, inProgressUserService.findByWorkflowItemAndEPerson(context, wfi, user)); - //Make sure the removed user has his custom rights removed + //Make sure the removed user has their custom rights removed xmlWorkflowService.removeUserItemPolicies(context, wfi.getItem(), user); Workflow workflow = workflowFactory.getWorkflow(wfi.getCollection()); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java index d77eb16ea767..bc91a1fd9298 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowServiceImpl.java @@ -45,7 +45,7 @@ import org.dspace.core.Context; import org.dspace.core.Email; import org.dspace.core.I18nUtil; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.curate.service.XmlWorkflowCuratorService; import 
org.dspace.eperson.EPerson; import org.dspace.eperson.Group; @@ -221,6 +221,8 @@ public XmlWorkflowItem start(Context context, WorkspaceItem wsi) //Get our next step, if none is found, archive our item firstStep = wf.getNextStep(context, wfi, firstStep, ActionResult.OUTCOME_COMPLETE); if (firstStep == null) { + // record the submitted provenance message + recordStart(context, wfi.getItem(),null); archive(context, wfi); } else { activateFirstStep(context, wf, firstStep, wfi); @@ -317,7 +319,7 @@ protected void activateFirstStep(Context context, Workflow wf, Step firstStep, X // current step cannot be completed and we must exit immediately. if (!xmlWorkflowCuratorService.doCuration(context, wfi)) { // don't proceed - either curation tasks queued, or item rejected - log.info(LogManager.getHeader(context, "start_workflow", + log.info(LogHelper.getHeader(context, "start_workflow", "workflow_item_id=" + wfi.getID() + ",item_id=" + wfi.getItem().getID() + ",collection_id=" + wfi.getCollection().getID() @@ -328,7 +330,7 @@ protected void activateFirstStep(Context context, Workflow wf, Step firstStep, X // Activate the step. firstActionConfig.getProcessingAction().activate(context, wfi); - log.info(LogManager.getHeader(context, "start_workflow", + log.info(LogHelper.getHeader(context, "start_workflow", firstActionConfig.getProcessingAction() + " workflow_item_id=" + wfi.getID() + "item_id=" + wfi.getItem().getID() @@ -366,7 +368,7 @@ public WorkflowActionConfig doState(Context c, EPerson user, // current step cannot be completed and we must exit immediately. 
if (!xmlWorkflowCuratorService.doCuration(c, wi)) { // don't proceed - either curation tasks queued, or item rejected - log.info(LogManager.getHeader(c, "advance_workflow", + log.info(LogHelper.getHeader(c, "advance_workflow", "workflow_item_id=" + wi.getID() + ",item_id=" + wi.getItem().getID() + ",collection_id=" + wi.getCollection().getID() @@ -391,7 +393,7 @@ public WorkflowActionConfig doState(Context c, EPerson user, throw new AuthorizeException("You are not allowed to to perform this task."); } } catch (WorkflowConfigurationException e) { - log.error(LogManager.getHeader(c, "error while executing state", + log.error(LogHelper.getHeader(c, "error while executing state", "workflow: " + workflow.getID() + " action: " + currentActionConfig.getId() + " workflowItemId: " + workflowItemId), e); @@ -447,7 +449,7 @@ public WorkflowActionConfig processOutcome(Context c, EPerson user, Workflow wor enteredNewStep); } } else if (enteredNewStep) { - // If the user finished his/her step, we keep processing until there is a UI step action or no + // If the user finished their step, we keep processing until there is a UI step action or no // step at all nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult()); c.turnOffAuthorisationSystem(); @@ -509,7 +511,7 @@ public WorkflowActionConfig processOutcome(Context c, EPerson user, Workflow wor } - log.error(LogManager.getHeader(c, "Invalid step outcome", "Workflow item id: " + wfi.getID())); + log.error(LogHelper.getHeader(c, "Invalid step outcome", "Workflow item id: " + wfi.getID())); throw new WorkflowException("Invalid step outcome"); } @@ -558,7 +560,7 @@ protected void logWorkflowEvent(Context c, String workflowId, String previousSte DSpaceServicesFactory.getInstance().getEventService().fireEvent(usageWorkflowEvent); } catch (SQLException e) { //Catch all errors we do not want our workflow to crash because the logging threw an exception - log.error(LogManager.getHeader(c, "Error while logging 
workflow event", "Workflow Item: " + wfi.getID()), + log.error(LogHelper.getHeader(c, "Error while logging workflow event", "Workflow Item: " + wfi.getID()), e); } } @@ -615,7 +617,7 @@ protected Item archive(Context context, XmlWorkflowItem wfi) // Remove (if any) the workflowItemroles for this item workflowItemRoleService.deleteForWorkflowItem(context, wfi); - log.info(LogManager.getHeader(context, "archive_item", "workflow_item_id=" + log.info(LogHelper.getHeader(context, "archive_item", "workflow_item_id=" + wfi.getID() + "item_id=" + item.getID() + "collection_id=" + collection.getID())); @@ -630,7 +632,7 @@ protected Item archive(Context context, XmlWorkflowItem wfi) itemService.update(context, item); // Log the event - log.info(LogManager.getHeader(context, "install_item", "workflow_item_id=" + log.info(LogHelper.getHeader(context, "install_item", "workflow_item_id=" + wfi.getID() + ", item_id=" + item.getID() + "handle=FIXME")); return item; @@ -680,8 +682,8 @@ protected void notifyOfArchive(Context context, Item item, Collection coll) email.send(); } } catch (MessagingException e) { - log.warn(LogManager.getHeader(context, "notifyOfArchive", - "cannot email user" + " item_id=" + item.getID())); + log.warn(LogHelper.getHeader(context, "notifyOfArchive", + "cannot email user" + " item_id=" + item.getID()), e); } } @@ -713,9 +715,9 @@ public void notifyOfCuration(Context c, XmlWorkflowItem wi, email.send(); } } catch (MessagingException e) { - log.warn(LogManager.getHeader(c, "notifyOfCuration", + log.warn(LogHelper.getHeader(c, "notifyOfCuration", "cannot email users of workflow_item_id " + wi.getID() - + ": " + e.getMessage())); + + ": " + e.getMessage()), e); } } @@ -938,7 +940,7 @@ public void removeUserItemPolicies(Context context, Item item, EPerson e) throws authorizeService.removeEPersonPolicies(context, bitstream, e); } } - // Ensure that the submitter always retains his resource policies + // Ensure that the submitter always retains their resource 
policies if (e.getID().equals(item.getSubmitter().getID())) { grantSubmitterReadPolicies(context, item); } @@ -981,7 +983,7 @@ public void deleteWorkflowByWorkflowItem(Context context, XmlWorkflowItem wi, EP xmlWorkflowItemService.deleteWrapper(context, wi); // Now delete the item itemService.delete(context, myitem); - log.info(LogManager.getHeader(context, "delete_workflow", "workflow_item_id=" + log.info(LogHelper.getHeader(context, "delete_workflow", "workflow_item_id=" + workflowID + "item_id=" + itemID + "collection_id=" + collID + "eperson_id=" + e.getID())); @@ -1031,15 +1033,16 @@ public WorkspaceItem sendWorkflowItemBackSubmission(Context context, XmlWorkflow itemService.update(context, myitem); - // convert into personal workspace - WorkspaceItem wsi = returnToWorkspace(context, wi); - // remove policy for controller removeUserItemPolicies(context, myitem, e); revokeReviewerPolicies(context, myitem); + + // convert into personal workspace + WorkspaceItem wsi = returnToWorkspace(context, wi); + // notify that it's been rejected notifyOfReject(context, wi, e, rejection_message); - log.info(LogManager.getHeader(context, "reject_workflow", "workflow_item_id=" + log.info(LogHelper.getHeader(context, "reject_workflow", "workflow_item_id=" + wi.getID() + "item_id=" + wi.getItem().getID() + "collection_id=" + wi.getCollection().getID() + "eperson_id=" + e.getID())); @@ -1063,7 +1066,7 @@ public WorkspaceItem abort(Context c, XmlWorkflowItem wi, EPerson e) // convert into personal workspace WorkspaceItem wsi = returnToWorkspace(c, wi); - log.info(LogManager.getHeader(c, "abort_workflow", "workflow_item_id=" + log.info(LogHelper.getHeader(c, "abort_workflow", "workflow_item_id=" + wi.getID() + "item_id=" + wsi.getItem().getID() + "collection_id=" + wi.getCollection().getID() + "eperson_id=" + e.getID())); @@ -1075,6 +1078,53 @@ public WorkspaceItem abort(Context c, XmlWorkflowItem wi, EPerson e) return wsi; } + @Override + public void restartWorkflow(Context 
context, XmlWorkflowItem wi, EPerson decliner, String provenance) + throws SQLException, AuthorizeException, IOException, WorkflowException { + if (!authorizeService.isAdmin(context)) { + throw new AuthorizeException("You must be an admin to restart a workflow"); + } + context.turnOffAuthorisationSystem(); + + // rejection provenance + Item myitem = wi.getItem(); + + // Here's what happened + String provDescription = + provenance + " Declined by " + getEPersonName(decliner) + " on " + DCDate.getCurrent().toString() + + " (GMT) "; + + // Add to item as a DC field + itemService + .addMetadata(context, myitem, MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provDescription); + + //Clear any workflow schema related metadata + itemService + .clearMetadata(context, myitem, WorkflowRequirementsService.WORKFLOW_SCHEMA, Item.ANY, Item.ANY, Item.ANY); + + itemService.update(context, myitem); + + // remove policy for controller + removeUserItemPolicies(context, myitem, decliner); + revokeReviewerPolicies(context, myitem); + + // convert into personal workspace + WorkspaceItem wsi = returnToWorkspace(context, wi); + + // Because of issue of xmlWorkflowItemService not realising wfi wrapper has been deleted + context.commit(); + wsi = context.reloadEntity(wsi); + + log.info(LogHelper.getHeader(context, "decline_workflow", "workflow_item_id=" + + wi.getID() + "item_id=" + wi.getItem().getID() + "collection_id=" + wi.getCollection().getID() + + "eperson_id=" + decliner.getID())); + + // Restart workflow + this.startWithoutNotify(context, wsi); + context.restoreAuthSystemState(); + } + /** * Return the workflow item to the workspace of the submitter. The workflow * item is removed, and a workspace item created. 
@@ -1114,7 +1164,7 @@ protected WorkspaceItem returnToWorkspace(Context c, XmlWorkflowItem wfi) workspaceItemService.update(c, workspaceItem); //myitem.update(); - log.info(LogManager.getHeader(c, "return_to_workspace", + log.info(LogHelper.getHeader(c, "return_to_workspace", "workflow_item_id=" + wfi.getID() + "workspace_item_id=" + workspaceItem.getID())); @@ -1139,25 +1189,30 @@ protected void recordStart(Context context, Item myitem, Action action) DCDate now = DCDate.getCurrent(); // Create provenance description - String provmessage = ""; + StringBuffer provmessage = new StringBuffer(); if (myitem.getSubmitter() != null) { - provmessage = "Submitted by " + myitem.getSubmitter().getFullName() - + " (" + myitem.getSubmitter().getEmail() + ") on " - + now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; + provmessage.append("Submitted by ").append(myitem.getSubmitter().getFullName()) + .append(" (").append(myitem.getSubmitter().getEmail()).append(") on ") + .append(now.toString()); } else { // else, null submitter - provmessage = "Submitted by unknown (probably automated) on" - + now.toString() + " workflow start=" + action.getProvenanceStartId() + "\n"; + provmessage.append("Submitted by unknown (probably automated) on") + .append(now.toString()); + } + if (action != null) { + provmessage.append(" workflow start=").append(action.getProvenanceStartId()).append("\n"); + } else { + provmessage.append("\n"); } // add sizes and checksums of bitstreams - provmessage += installItemService.getBitstreamProvenanceMessage(context, myitem); + provmessage.append(installItemService.getBitstreamProvenanceMessage(context, myitem)); // Add message to the DC itemService .addMetadata(context, myitem, MetadataSchemaEnum.DC.getName(), - "description", "provenance", "en", provmessage); + "description", "provenance", "en", provmessage.toString()); itemService.update(context, myitem); } @@ -1192,10 +1247,10 @@ protected void notifyOfReject(Context c, 
XmlWorkflowItem wi, EPerson e, } } catch (IOException | MessagingException ex) { // log this email error - log.warn(LogManager.getHeader(c, "notify_of_reject", + log.warn(LogHelper.getHeader(c, "notify_of_reject", "cannot email user" + " eperson_id" + e.getID() + " eperson_email" + e.getEmail() - + " workflow_item_id" + wi.getID())); + + " workflow_item_id" + wi.getID()), ex); } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/migration/RestartWorkflow.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/migration/RestartWorkflow.java index 60e520eee096..849010751831 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/migration/RestartWorkflow.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/migration/RestartWorkflow.java @@ -19,7 +19,7 @@ import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import org.dspace.eperson.service.EPersonService; @@ -137,7 +137,7 @@ public static void main(String[] args) { WorkspaceItem wsi = workflowService .sendWorkflowItemBackSubmission(context, workflowItem, myEPerson, provenance, ""); - log.info(LogManager.getHeader(context, "restart_workflow", "workflow_item_id=" + log.info(LogHelper.getHeader(context, "restart_workflow", "workflow_item_id=" + workflowItem.getID() + "item_id=" + workflowItem.getItem().getID() + "collection_id=" + workflowItem.getCollection().getID())); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java index 636007344c1b..fd081b3a1bf4 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Workflow.java @@ -8,7 +8,7 @@ package org.dspace.xmlworkflow.state; import java.sql.SQLException; 
-import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -118,7 +118,7 @@ public void setSteps(List steps) { * @return a map containing the roles, the role name will the key, the role itself the value */ public Map getRoles() { - Map roles = new HashMap<>(); + Map roles = new LinkedHashMap<>(); for (Step step : steps) { if (step.getRole() != null) { roles.put(step.getRole().getId(), step.getRole()); diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/Action.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/Action.java index 0aabfab0573a..1cfa33b12170 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/Action.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/Action.java @@ -14,10 +14,15 @@ import javax.servlet.http.HttpServletRequest; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DCDate; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; import org.dspace.core.Context; import org.dspace.workflow.WorkflowException; import org.dspace.xmlworkflow.RoleMembers; import org.dspace.xmlworkflow.WorkflowConfigurationException; +import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.state.Step; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -37,6 +42,8 @@ public abstract class Action { private WorkflowActionConfig parent; private static final String ERROR_FIELDS_ATTRIBUTE = "dspace.workflow.error_fields"; + private List advancedOptions = new ArrayList<>(); + private List advancedInfo = new ArrayList<>(); /** * Called when a workflow item becomes eligible for this Action. 
@@ -192,4 +199,58 @@ protected void addErrorField(HttpServletRequest request, String fieldName) { //save updated list setErrorFields(request, errorFields); } + + /** + * Returns a list of advanced options that the user can select at this action + * @return A list of advanced options of this action, resulting in the next step of the workflow + */ + protected List getAdvancedOptions() { + return advancedOptions; + } + + /** + * Returns true if this Action has advanced options, false if it doesn't + * @return true if there are advanced options, false otherwise + */ + protected boolean isAdvanced() { + return !getAdvancedOptions().isEmpty(); + } + + /** + * Returns a list of advanced info required by the advanced options + * @return A list of advanced info required by the advanced options + */ + protected List getAdvancedInfo() { + return advancedInfo; + } + + + /** + * Adds info in the metadata field dc.description.provenance about item being approved containing in which step + * it was approved, which user approved it and the time + * + * @param c DSpace contect + * @param wfi Workflow item we're adding workflow accept provenance on + */ + public void addApprovedProvenance(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { + ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + + //Add the provenance for the accept + String now = DCDate.getCurrent().toString(); + + // Get user's name + email address + String usersName = + XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService().getEPersonName(c.getCurrentUser()); + + String provDescription = getProvenanceStartId() + " Approved for entry into archive by " + usersName + " on " + + now + " (GMT) "; + + // Add to item as a DC field + c.turnOffAuthorisationSystem(); + itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), "description", "provenance", "en", + provDescription); + itemService.update(c, wfi.getItem()); + c.restoreAuthSystemState(); 
+ } + } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/ActionAdvancedInfo.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/ActionAdvancedInfo.java new file mode 100644 index 000000000000..b49fdb34f869 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/ActionAdvancedInfo.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xmlworkflow.state.actions; + +/** + * Interface for the shared properties of an 'advancedInfo' section of an advanced workflow {@link Action} + * Implementations of this class will define the specific fields per action that will need to be defined/configured + * to pass along this info to REST endpoint + */ +public abstract class ActionAdvancedInfo { + + protected String type; + protected String id; + + protected final static String TYPE_PREFIX = "action_info_"; + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = TYPE_PREFIX + type; + } + + public String getId() { + return id; + } + + /** + * Setter for the Action id to be set. 
+ * This is an MD5 hash of the type and the stringified properties of the advanced info + * + * @param type The type of this Action to be included in the MD5 hash + */ + protected abstract void generateId(String type); + +} diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/WorkflowActionConfig.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/WorkflowActionConfig.java index 1dc61888b140..3475b04c7478 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/WorkflowActionConfig.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/WorkflowActionConfig.java @@ -69,4 +69,28 @@ public List getOptions() { return this.processingAction.getOptions(); } + /** + * Returns a list of advanced options this user has on this action, resulting in the next step of the workflow + * @return A list of advanced options of this action, resulting in the next step of the workflow + */ + public List getAdvancedOptions() { + return this.processingAction.getAdvancedOptions(); + } + + /** + * Returns a boolean depending on whether this action has advanced options + * @return The boolean indicating whether this action has advanced options + */ + public boolean isAdvanced() { + return this.processingAction.isAdvanced(); + } + + /** + * Returns a Map of info for the advanced options this user has on this action + * @return a Map of info for the advanced options this user has on this action + */ + public List getAdvancedInfo() { + return this.processingAction.getAdvancedInfo(); + } + } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java index 743d00b2b6e9..67b400c6592e 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java +++ 
b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/AcceptEditRejectAction.java @@ -15,8 +15,6 @@ import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DCDate; -import org.dspace.content.MetadataSchemaEnum; import org.dspace.core.Context; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.state.Step; @@ -34,8 +32,6 @@ */ public class AcceptEditRejectAction extends ProcessingAction { - private static final String SUBMIT_APPROVE = "submit_approve"; - private static final String SUBMIT_REJECT = "submit_reject"; private static final String SUBMITTER_IS_DELETED_PAGE = "submitter_deleted"; //TODO: rename to AcceptAndEditMetadataAction @@ -53,7 +49,7 @@ public ActionResult execute(Context c, XmlWorkflowItem wfi, Step step, HttpServl case SUBMIT_APPROVE: return processAccept(c, wfi); case SUBMIT_REJECT: - return processRejectPage(c, wfi, request); + return super.processRejectPage(c, wfi, request); case SUBMITTER_IS_DELETED_PAGE: return processSubmitterIsDeletedPage(c, wfi, request); default: @@ -69,33 +65,18 @@ public List getOptions() { options.add(SUBMIT_APPROVE); options.add(SUBMIT_REJECT); options.add(ProcessingAction.SUBMIT_EDIT_METADATA); + options.add(RETURN_TO_POOL); return options; } public ActionResult processAccept(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { //Delete the tasks - addApprovedProvenance(c, wfi); + super.addApprovedProvenance(c, wfi); return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); } - public ActionResult processRejectPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) - throws SQLException, AuthorizeException, IOException { - String reason = request.getParameter("reason"); - if (reason == null || 0 == reason.trim().length()) { - addErrorField(request, "reason"); - return new ActionResult(ActionResult.TYPE.TYPE_ERROR); - } - - // We have 
pressed reject, so remove the task the user has & put it back - // to a workspace item - XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService().sendWorkflowItemBackSubmission(c, wfi, - c.getCurrentUser(), this.getProvenanceStartId(), reason); - - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); - } - public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) throws SQLException, AuthorizeException, IOException { if (request.getParameter("submit_delete") != null) { @@ -111,21 +92,4 @@ public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi return new ActionResult(ActionResult.TYPE.TYPE_PAGE); } } - - private void addApprovedProvenance(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { - //Add the provenance for the accept - String now = DCDate.getCurrent().toString(); - - // Get user's name + email address - String usersName = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .getEPersonName(c.getCurrentUser()); - - String provDescription = getProvenanceStartId() + " Approved for entry into archive by " - + usersName + " on " + now + " (GMT) "; - - // Add to item as a DC field - itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), "description", "provenance", "en", - provDescription); - itemService.update(c, wfi.getItem()); - } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/FinalEditAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/FinalEditAction.java index 3c4e0ffc1d71..9b83be5d7bfa 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/FinalEditAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/FinalEditAction.java @@ -14,10 +14,7 @@ import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; -import 
org.dspace.content.DCDate; -import org.dspace.content.MetadataSchemaEnum; import org.dspace.core.Context; -import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.state.Step; import org.dspace.xmlworkflow.state.actions.ActionResult; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -52,7 +49,7 @@ public ActionResult processMainPage(Context c, XmlWorkflowItem wfi, HttpServletR switch (Util.getSubmitButton(request, SUBMIT_CANCEL)) { case SUBMIT_APPROVE: //Delete the tasks - addApprovedProvenance(c, wfi); + super.addApprovedProvenance(c, wfi); return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); default: //We pressed the leave button so return to our submissions page @@ -67,25 +64,8 @@ public List getOptions() { List options = new ArrayList<>(); options.add(SUBMIT_APPROVE); options.add(ProcessingAction.SUBMIT_EDIT_METADATA); + options.add(RETURN_TO_POOL); return options; } - private void addApprovedProvenance(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { - //Add the provenance for the accept - String now = DCDate.getCurrent().toString(); - - // Get user's name + email address - String usersName = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .getEPersonName(c.getCurrentUser()); - - String provDescription = getProvenanceStartId() + " Approved for entry into archive by " - + usersName + " on " + now + " (GMT) "; - - // Add to item as a DC field - itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), "description", "provenance", "en", - provDescription); - itemService.update(c, wfi.getItem()); - } - - } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ProcessingAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ProcessingAction.java index 8b8358a8d632..7a1c62adbd1e 100644 --- 
a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ProcessingAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ProcessingAction.java @@ -7,12 +7,16 @@ */ package org.dspace.xmlworkflow.state.actions.processingaction; +import java.io.IOException; import java.sql.SQLException; import javax.servlet.http.HttpServletRequest; +import org.dspace.authorize.AuthorizeException; import org.dspace.content.service.ItemService; import org.dspace.core.Context; +import org.dspace.xmlworkflow.service.XmlWorkflowService; import org.dspace.xmlworkflow.state.actions.Action; +import org.dspace.xmlworkflow.state.actions.ActionResult; import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; @@ -32,9 +36,15 @@ public abstract class ProcessingAction extends Action { protected ClaimedTaskService claimedTaskService; @Autowired(required = true) protected ItemService itemService; + @Autowired + protected XmlWorkflowService xmlWorkflowService; public static final String SUBMIT_EDIT_METADATA = "submit_edit_metadata"; public static final String SUBMIT_CANCEL = "submit_cancel"; + protected static final String SUBMIT_APPROVE = "submit_approve"; + protected static final String SUBMIT_REJECT = "submit_reject"; + protected static final String RETURN_TO_POOL = "return_to_pool"; + protected static final String REJECT_REASON = "reason"; @Override public boolean isAuthorized(Context context, HttpServletRequest request, XmlWorkflowItem wfi) throws SQLException { @@ -48,4 +58,31 @@ public boolean isAuthorized(Context context, HttpServletRequest request, XmlWork task.getStepID().equals(getParent().getStep().getId()) && task.getActionID().equals(getParent().getId()); } + + /** + * Process result when option {@link this#SUBMIT_REJECT} is selected. 
+ * - Sets the reason and workflow step responsible on item in dc.description.provenance + * - Send workflow back to the submission + * If reason is not given => error + */ + public ActionResult processRejectPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, AuthorizeException, IOException { + String reason = request.getParameter(REJECT_REASON); + if (reason == null || 0 == reason.trim().length()) { + addErrorField(request, REJECT_REASON); + return new ActionResult(ActionResult.TYPE.TYPE_ERROR); + } + + // We have pressed reject, so remove the task the user has & put it back + // to a workspace item + xmlWorkflowService.sendWorkflowItemBackSubmission(c, wfi, c.getCurrentUser(), this.getProvenanceStartId(), + reason); + + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } + + @Override + protected boolean isAdvanced() { + return !getAdvancedOptions().isEmpty(); + } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java index 8474757be65c..bd74ab3c7152 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ReviewAction.java @@ -15,8 +15,6 @@ import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DCDate; -import org.dspace.content.MetadataSchemaEnum; import org.dspace.core.Context; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.state.Step; @@ -36,11 +34,8 @@ public class ReviewAction extends ProcessingAction { public static final int MAIN_PAGE = 0; public static final int REJECT_PAGE = 1; - private static final String SUBMIT_APPROVE = "submit_approve"; - private static final String SUBMIT_REJECT = "submit_reject"; private 
static final String SUBMITTER_IS_DELETED_PAGE = "submitter_deleted"; - @Override public void activate(Context c, XmlWorkflowItem wfItem) { @@ -54,7 +49,7 @@ public ActionResult execute(Context c, XmlWorkflowItem wfi, Step step, HttpServl case SUBMIT_APPROVE: return processAccept(c, wfi); case SUBMIT_REJECT: - return processRejectPage(c, wfi, step, request); + return super.processRejectPage(c, wfi, request); case SUBMITTER_IS_DELETED_PAGE: return processSubmitterIsDeletedPage(c, wfi, request); default: @@ -69,50 +64,15 @@ public List getOptions() { List options = new ArrayList<>(); options.add(SUBMIT_APPROVE); options.add(SUBMIT_REJECT); + options.add(RETURN_TO_POOL); return options; } public ActionResult processAccept(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { - //Delete the tasks - addApprovedProvenance(c, wfi); + super.addApprovedProvenance(c, wfi); return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); } - private void addApprovedProvenance(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { - //Add the provenance for the accept - String now = DCDate.getCurrent().toString(); - - // Get user's name + email address - String usersName = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .getEPersonName(c.getCurrentUser()); - - String provDescription = getProvenanceStartId() + " Approved for entry into archive by " - + usersName + " on " + now + " (GMT) "; - - // Add to item as a DC field - itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), "description", "provenance", "en", - provDescription); - itemService.update(c, wfi.getItem()); - } - - public ActionResult processRejectPage(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request) - throws SQLException, AuthorizeException, IOException { - String reason = request.getParameter("reason"); - if (reason == null || 0 == reason.trim().length()) { - request.setAttribute("page", 
REJECT_PAGE); - addErrorField(request, "reason"); - return new ActionResult(ActionResult.TYPE.TYPE_ERROR); - } - - //We have pressed reject, so remove the task the user has & put it back to a workspace item - XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .sendWorkflowItemBackSubmission(c, wfi, c.getCurrentUser(), - this.getProvenanceStartId(), reason); - - - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); - } - public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) throws SQLException, AuthorizeException, IOException { if (request.getParameter("submit_delete") != null) { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreEvaluationAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreEvaluationAction.java index a8346411114e..16d35b36683a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreEvaluationAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreEvaluationAction.java @@ -7,6 +7,9 @@ */ package org.dspace.xmlworkflow.state.actions.processingaction; +import static org.dspace.xmlworkflow.state.actions.processingaction.ScoreReviewAction.REVIEW_FIELD; +import static org.dspace.xmlworkflow.state.actions.processingaction.ScoreReviewAction.SCORE_FIELD; + import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; @@ -19,7 +22,6 @@ import org.dspace.content.MetadataValue; import org.dspace.core.Context; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; -import org.dspace.xmlworkflow.service.WorkflowRequirementsService; import org.dspace.xmlworkflow.state.Step; import org.dspace.xmlworkflow.state.actions.ActionResult; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -37,6 +39,7 @@ */ public class ScoreEvaluationAction extends 
ProcessingAction { + // Minimum aggregate of scores private int minimumAcceptanceScore; @Override @@ -47,43 +50,64 @@ public void activate(Context c, XmlWorkflowItem wf) { @Override public ActionResult execute(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request) throws SQLException, AuthorizeException, IOException { - boolean hasPassed = false; - //Retrieve all our scores from the metadata & add em up + // Retrieve all our scores from the metadata & add em up + int scoreMean = getMeanScore(wfi); + //We have passed if we have at least gained our minimum score + boolean hasPassed = getMinimumAcceptanceScore() <= scoreMean; + //Whether or not we have passed, clear our score information + itemService.clearMetadata(c, wfi.getItem(), SCORE_FIELD.schema, SCORE_FIELD.element, SCORE_FIELD.qualifier, + Item.ANY); + if (hasPassed) { + this.addRatingInfoToProv(c, wfi, scoreMean); + return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); + } else { + //We haven't passed, reject our item + XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() + .sendWorkflowItemBackSubmission(c, wfi, c.getCurrentUser(), this.getProvenanceStartId(), + "The item was reject due to a bad review score."); + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + } + } + + private int getMeanScore(XmlWorkflowItem wfi) { List scores = itemService - .getMetadata(wfi.getItem(), WorkflowRequirementsService.WORKFLOW_SCHEMA, "score", null, Item.ANY); + .getMetadata(wfi.getItem(), SCORE_FIELD.schema, SCORE_FIELD.element, SCORE_FIELD.qualifier, Item.ANY); + int scoreMean = 0; if (0 < scores.size()) { int totalScoreCount = 0; for (MetadataValue score : scores) { totalScoreCount += Integer.parseInt(score.getValue()); } - int scoreMean = totalScoreCount / scores.size(); - //We have passed if we have at least gained our minimum score - hasPassed = getMinimumAcceptanceScore() <= scoreMean; - //Wether or not we have passed, clear our score 
information - itemService - .clearMetadata(c, wfi.getItem(), WorkflowRequirementsService.WORKFLOW_SCHEMA, "score", null, Item.ANY); + scoreMean = totalScoreCount / scores.size(); + } + return scoreMean; + } - String provDescription = getProvenanceStartId() + " Approved for entry into archive with a score of: " + - scoreMean; - itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), - "description", "provenance", "en", provDescription); - itemService.update(c, wfi.getItem()); + private void addRatingInfoToProv(Context c, XmlWorkflowItem wfi, int scoreMean) + throws SQLException, AuthorizeException { + StringBuilder provDescription = new StringBuilder(); + provDescription.append(String.format("%s Approved for entry into archive with a score of: %s", + getProvenanceStartId(), scoreMean)); + List reviews = itemService + .getMetadata(wfi.getItem(), REVIEW_FIELD.schema, REVIEW_FIELD.element, REVIEW_FIELD.qualifier, Item.ANY); + if (!reviews.isEmpty()) { + provDescription.append(" | Reviews: "); } - if (hasPassed) { - return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); - } else { - //We haven't passed, reject our item - XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .sendWorkflowItemBackSubmission(c, wfi, c.getCurrentUser(), - this.getProvenanceStartId(), - "The item was reject due to a bad review score."); - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + for (MetadataValue review : reviews) { + provDescription.append(String.format("; %s", review.getValue())); } + c.turnOffAuthorisationSystem(); + itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), + "description", "provenance", "en", provDescription.toString()); + itemService.update(c, wfi.getItem()); + c.restoreAuthSystemState(); } @Override public List getOptions() { - return new ArrayList<>(); + List options = new ArrayList<>(); + options.add(RETURN_TO_POOL); + return options; } public int 
getMinimumAcceptanceScore() { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java index c28fe2d93ef8..43a3decacc7e 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewAction.java @@ -9,14 +9,20 @@ import java.sql.SQLException; import java.util.Arrays; +import java.util.Collections; import java.util.List; import javax.servlet.http.HttpServletRequest; +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.MetadataFieldName; import org.dspace.core.Context; import org.dspace.xmlworkflow.service.WorkflowRequirementsService; import org.dspace.xmlworkflow.state.Step; +import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo; import org.dspace.xmlworkflow.state.actions.ActionResult; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -24,40 +30,121 @@ * This action will allow multiple users to rate a certain item * if the mean of this score is higher then the minimum score the * item will be sent to the next action/step else it will be rejected - * - * @author Bram De Schouwer (bram.deschouwer at dot com) - * @author Kevin Van de Velde (kevin at atmire dot com) - * @author Ben Bosman (ben at atmire dot com) - * @author Mark Diggory (markd at atmire dot com) */ public class ScoreReviewAction extends ProcessingAction { + private static final Logger log = LogManager.getLogger(ScoreReviewAction.class); + + // Option(s) + public static final String SUBMIT_SCORE = "submit_score"; + + // Response param(s) + private static final String SCORE = "score"; + 
private static final String REVIEW = "review"; + + // Metadata fields to save params in + public static final MetadataFieldName SCORE_FIELD = + new MetadataFieldName(WorkflowRequirementsService.WORKFLOW_SCHEMA, SCORE, null); + public static final MetadataFieldName REVIEW_FIELD = + new MetadataFieldName(WorkflowRequirementsService.WORKFLOW_SCHEMA, REVIEW, null); - private static final String SUBMIT_SCORE = "submit_score"; + // Whether or not it is required that a text review is added to the rating + private boolean descriptionRequired; + // Maximum value rating is allowed to be + private int maxValue; @Override public void activate(Context c, XmlWorkflowItem wf) { - + // empty } @Override public ActionResult execute(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request) - throws SQLException, AuthorizeException { - if (request.getParameter(SUBMIT_SCORE) != null) { - int score = Util.getIntParameter(request, "score"); - //Add our score to the metadata - itemService.addMetadata(c, wfi.getItem(), WorkflowRequirementsService.WORKFLOW_SCHEMA, "score", null, null, - String.valueOf(score)); - itemService.update(c, wfi.getItem()); - - return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); - } else { - //We have pressed the leave button so return to our submission page - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); + throws SQLException, AuthorizeException { + if (super.isOptionInParam(request) && + StringUtils.equalsIgnoreCase(Util.getSubmitButton(request, SUBMIT_CANCEL), SUBMIT_SCORE)) { + return processSetRating(c, wfi, request); } + return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); + } + + private ActionResult processSetRating(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, AuthorizeException { + + int score = Util.getIntParameter(request, SCORE); + String review = request.getParameter(REVIEW); + if (!this.checkRequestValid(score, review)) { + return new 
ActionResult(ActionResult.TYPE.TYPE_ERROR); + } + //Add our rating and review to the metadata + itemService.addMetadata(c, wfi.getItem(), SCORE_FIELD.schema, SCORE_FIELD.element, SCORE_FIELD.qualifier, null, + String.valueOf(score)); + if (StringUtils.isNotBlank(review)) { + itemService.addMetadata(c, wfi.getItem(), REVIEW_FIELD.schema, REVIEW_FIELD.element, + REVIEW_FIELD.qualifier, null, String.format("%s - %s", score, review)); + } + itemService.update(c, wfi.getItem()); + + return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); + } + + /** + * Request is not valid if: + * - Given score is higher than configured maxValue + * - There is no review given and description is configured to be required + * Config in workflow-actions.xml + * + * @param score Given score rating from request + * @param review Given review/description from request + * @return True if valid request params with config, otherwise false + */ + private boolean checkRequestValid(int score, String review) { + if (score > this.maxValue) { + log.error("{} only allows max rating {} (config workflow-actions.xml), given rating of " + + "{} not allowed.", this.getClass().toString(), this.maxValue, score); + return false; + } + if (StringUtils.isBlank(review) && this.descriptionRequired) { + log.error("{} has config descriptionRequired=true (workflow-actions.xml), so rating " + + "requests without 'review' query param containing description are not allowed", + this.getClass().toString()); + return false; + } + return true; } @Override public List getOptions() { + return List.of(SUBMIT_SCORE, RETURN_TO_POOL); + } + + @Override + protected List getAdvancedOptions() { return Arrays.asList(SUBMIT_SCORE); } + + @Override + protected List getAdvancedInfo() { + ScoreReviewActionAdvancedInfo scoreReviewActionAdvancedInfo = new ScoreReviewActionAdvancedInfo(); + scoreReviewActionAdvancedInfo.setDescriptionRequired(descriptionRequired); + 
scoreReviewActionAdvancedInfo.setMaxValue(maxValue); + scoreReviewActionAdvancedInfo.setType(SUBMIT_SCORE); + scoreReviewActionAdvancedInfo.generateId(SUBMIT_SCORE); + return Collections.singletonList(scoreReviewActionAdvancedInfo); + } + + /** + * Setter that sets the descriptionRequired property from workflow-actions.xml + * @param descriptionRequired boolean whether a description is required + */ + public void setDescriptionRequired(boolean descriptionRequired) { + this.descriptionRequired = descriptionRequired; + } + + /** + * Setter that sets the maxValue property from workflow-actions.xml + * @param maxValue integer of the maximum allowed value + */ + public void setMaxValue(int maxValue) { + this.maxValue = maxValue; + } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewActionAdvancedInfo.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewActionAdvancedInfo.java new file mode 100644 index 000000000000..5b97fe3195ae --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/ScoreReviewActionAdvancedInfo.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xmlworkflow.state.actions.processingaction; + +import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo; +import org.springframework.util.DigestUtils; + +/** + * Class that holds the advanced information needed for the + * {@link org.dspace.xmlworkflow.state.actions.processingaction.ScoreReviewAction} + * See config {@code workflow-actions.cfg} + */ +public class ScoreReviewActionAdvancedInfo extends ActionAdvancedInfo { + private boolean descriptionRequired; + private int maxValue; + + public boolean isDescriptionRequired() { + return 
descriptionRequired; + } + + public void setDescriptionRequired(boolean descriptionRequired) { + this.descriptionRequired = descriptionRequired; + } + + public int getMaxValue() { + return maxValue; + } + + public void setMaxValue(int maxValue) { + this.maxValue = maxValue; + } + + @Override + public void generateId(String type) { + String idString = type + + ";descriptionRequired," + descriptionRequired + + ";maxValue," + maxValue; + super.id = DigestUtils.md5DigestAsHex(idString.getBytes()); + } +} diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java index 16a87772755a..0e8ab40a5205 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerAction.java @@ -9,17 +9,27 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.UUID; +import javax.annotation.Nullable; import javax.servlet.http.HttpServletRequest; +import org.apache.commons.lang.ArrayUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.eperson.service.EPersonService; +import org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; import org.dspace.xmlworkflow.Role; import org.dspace.xmlworkflow.state.Step; +import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo; import org.dspace.xmlworkflow.state.actions.ActionResult; import org.dspace.xmlworkflow.storedcomponents.WorkflowItemRole; 
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; @@ -37,13 +47,13 @@ */ public class SelectReviewerAction extends ProcessingAction { - public static final int SEARCH_RESULTS_PAGE = 1; - - public static final int RESULTS_PER_PAGE = 5; + private static final Logger log = LogManager.getLogger(SelectReviewerAction.class); private static final String SUBMIT_CANCEL = "submit_cancel"; - private static final String SUBMIT_SEARCH = "submit_search"; - private static final String SUBMIT_SELECT_REVIEWER = "submit_select_reviewer_"; + private static final String SUBMIT_SELECT_REVIEWER = "submit_select_reviewer"; + private static final String PARAM_REVIEWER = "eperson"; + + private static final String CONFIG_REVIEWER_GROUP = "action.selectrevieweraction.group"; private Role role; @@ -53,6 +63,15 @@ public class SelectReviewerAction extends ProcessingAction { @Autowired(required = true) private WorkflowItemRoleService workflowItemRoleService; + @Autowired + private ConfigurationService configurationService; + + @Autowired + private GroupService groupService; + + private static Group selectFromReviewsGroup; + private static boolean selectFromReviewsGroupInitialised = false; + @Override public void activate(Context c, XmlWorkflowItem wf) { @@ -60,56 +79,128 @@ public void activate(Context c, XmlWorkflowItem wf) { @Override public ActionResult execute(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { String submitButton = Util.getSubmitButton(request, SUBMIT_CANCEL); //Check if our user has pressed cancel if (submitButton.equals(SUBMIT_CANCEL)) { //Send us back to the submissions page return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); + } else if (submitButton.startsWith(SUBMIT_SELECT_REVIEWER)) { + return processSelectReviewers(c, wfi, request); + } + + //There are only 2 active buttons on this page, so if anything else happens just return an error + 
return new ActionResult(ActionResult.TYPE.TYPE_ERROR); + } - } else if (submitButton.equals(SUBMIT_SEARCH)) { - //Perform the search - String query = request.getParameter("query"); - int page = Util.getIntParameter(request, "result-page"); - if (page == -1) { - page = 0; + /** + * Method to handle the {@link this#SUBMIT_SELECT_REVIEWER} action: + * - will retrieve the reviewer(s) uuid from request (param {@link this#PARAM_REVIEWER}) + * - assign them to a {@link WorkflowItemRole} + * - In {@link org.dspace.xmlworkflow.state.actions.userassignment.AutoAssignAction} these reviewer(s) will get + * claimed task for this {@link XmlWorkflowItem} + * Will result in error if: + * - No reviewer(s) uuid in request (param {@link this#PARAM_REVIEWER}) + * - If none of the reviewer(s) uuid passed along result in valid EPerson + * - If the reviewer(s) passed along are not in {@link this#selectFromReviewsGroup} when it is set + * + * @param c current DSpace session + * @param wfi the item on which the action is to be performed + * @param request the current client request + * @return the result of performing the action + */ + private ActionResult processSelectReviewers(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, AuthorizeException { + //Retrieve the identifier of the eperson which will do the reviewing + String[] reviewerIds = request.getParameterValues(PARAM_REVIEWER); + if (ArrayUtils.isEmpty(reviewerIds)) { + return new ActionResult(ActionResult.TYPE.TYPE_ERROR); + } + List reviewers = new ArrayList<>(); + for (String reviewerId : reviewerIds) { + EPerson reviewer = ePersonService.find(c, UUID.fromString(reviewerId)); + if (reviewer == null) { + log.warn("No EPerson found with uuid {}", reviewerId); + } else { + reviewers.add(reviewer); } + } - int resultCount = ePersonService.searchResultCount(c, query); - List epeople = ePersonService.search(c, query, page * RESULTS_PER_PAGE, RESULTS_PER_PAGE); + if (!this.checkReviewersValid(c, 
reviewers)) { + return new ActionResult(ActionResult.TYPE.TYPE_ERROR); + } + createWorkflowItemRole(c, wfi, reviewers); + return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); + } - request.setAttribute("eperson-result-count", resultCount); - request.setAttribute("eperson-results", epeople); - request.setAttribute("result-page", page); - request.setAttribute("page", SEARCH_RESULTS_PAGE); - return new ActionResult(ActionResult.TYPE.TYPE_PAGE, SEARCH_RESULTS_PAGE); - } else if (submitButton.startsWith(SUBMIT_SELECT_REVIEWER)) { - //Retrieve the identifier of the eperson which will do the reviewing - UUID reviewerId = UUID.fromString(submitButton.substring(submitButton.lastIndexOf("_") + 1)); - EPerson reviewer = ePersonService.find(c, reviewerId); - //We have a reviewer, assign him, the workflowitemrole will be translated into a task in the autoassign - WorkflowItemRole workflowItemRole = workflowItemRoleService.create(c); - workflowItemRole.setEPerson(reviewer); - workflowItemRole.setRoleId(getRole().getId()); - workflowItemRole.setWorkflowItem(wfi); - workflowItemRoleService.update(c, workflowItemRole); - return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); + private boolean checkReviewersValid(Context c, List reviewers) throws SQLException { + if (reviewers.size() == 0) { + return false; + } + Group group = this.getGroup(c); + if (group != null) { + for (EPerson reviewer: reviewers) { + if (!groupService.isMember(c, reviewer, group)) { + log.error("Reviewers selected must be member of group {}", group.getID()); + return false; + } + } } + return true; + } - //There are only 2 active buttons on this page, so if anything else happens just return an error - return new ActionResult(ActionResult.TYPE.TYPE_ERROR); + private WorkflowItemRole createWorkflowItemRole(Context c, XmlWorkflowItem wfi, List reviewers) + throws SQLException, AuthorizeException { + WorkflowItemRole workflowItemRole = 
workflowItemRoleService.create(c); + workflowItemRole.setRoleId(getRole().getId()); + workflowItemRole.setWorkflowItem(wfi); + if (reviewers.size() == 1) { + // 1 reviewer in workflowitemrole => will be translated into a claimed task in the autoassign + workflowItemRole.setEPerson(reviewers.get(0)); + } else { + // multiple reviewers, create a temporary group and assign this group, the workflowitemrole will be + // translated into a claimed task for reviewers in the autoassign, where group will be deleted + c.turnOffAuthorisationSystem(); + Group selectedReviewsGroup = groupService.create(c); + groupService.setName(selectedReviewsGroup, "selectedReviewsGroup_" + wfi.getID()); + for (EPerson reviewer : reviewers) { + groupService.addMember(c, selectedReviewsGroup, reviewer); + } + workflowItemRole.setGroup(selectedReviewsGroup); + c.restoreAuthSystemState(); + } + workflowItemRoleService.update(c, workflowItemRole); + return workflowItemRole; } @Override public List getOptions() { List options = new ArrayList<>(); - options.add(SUBMIT_SEARCH); options.add(SUBMIT_SELECT_REVIEWER); + options.add(RETURN_TO_POOL); return options; } + @Override + protected List getAdvancedOptions() { + return Arrays.asList(SUBMIT_SELECT_REVIEWER); + } + + @Override + protected List getAdvancedInfo() { + List advancedInfo = new ArrayList<>(); + SelectReviewerActionAdvancedInfo selectReviewerActionAdvancedInfo = new SelectReviewerActionAdvancedInfo(); + if (getGroup(null) != null) { + selectReviewerActionAdvancedInfo.setGroup(getGroup(null).getID().toString()); + } + selectReviewerActionAdvancedInfo.setType(SUBMIT_SELECT_REVIEWER); + selectReviewerActionAdvancedInfo.generateId(SUBMIT_SELECT_REVIEWER); + advancedInfo.add(selectReviewerActionAdvancedInfo); + return advancedInfo; + } + public Role getRole() { return role; } @@ -118,4 +209,49 @@ public Role getRole() { public void setRole(Role role) { this.role = role; } + + /** + * Get the Reviewer group from the 
"action.selectrevieweraction.group" property in actions.cfg by its UUID or name + * Returns null if no (valid) group configured + * + * @return configured reviewers Group from property or null if none + */ + private Group getGroup(@Nullable Context context) { + if (selectFromReviewsGroupInitialised) { + return this.selectFromReviewsGroup; + } + if (context == null) { + context = new Context(); + } + String groupIdOrName = configurationService.getProperty(CONFIG_REVIEWER_GROUP); + + if (StringUtils.isNotBlank(groupIdOrName)) { + Group group = null; + try { + // try to get group by name + group = groupService.findByName(context, groupIdOrName); + if (group == null) { + // try to get group by uuid if not a name + group = groupService.find(context, UUID.fromString(groupIdOrName)); + } + } catch (Exception e) { + // There is an issue with the reviewer group that is set; if it is not set then can be chosen + // from all epeople + log.error("Issue with determining matching group for config {}={} for reviewer group of " + + "select reviewers workflow", CONFIG_REVIEWER_GROUP, groupIdOrName); + } + + this.selectFromReviewsGroup = group; + } + selectFromReviewsGroupInitialised = true; + return this.selectFromReviewsGroup; + } + + /** + * To be used by IT, e.g. 
{@code XmlWorkflowServiceIT}, when defining new 'Reviewers' group + */ + static public void resetGroup() { + selectFromReviewsGroup = null; + selectFromReviewsGroupInitialised = false; + } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerActionAdvancedInfo.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerActionAdvancedInfo.java new file mode 100644 index 000000000000..7a86a0b03d1f --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SelectReviewerActionAdvancedInfo.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xmlworkflow.state.actions.processingaction; + +import org.dspace.xmlworkflow.state.actions.ActionAdvancedInfo; +import org.springframework.util.DigestUtils; + +/** + * Class that holds the advanced information needed for the + * {@link org.dspace.xmlworkflow.state.actions.processingaction.SelectReviewerAction} + * See config {@code workflow-actions.cfg} + */ +public class SelectReviewerActionAdvancedInfo extends ActionAdvancedInfo { + private String group; + + public String getGroup() { + return group; + } + + public void setGroup(String group) { + this.group = group; + } + + @Override + public void generateId(String type) { + String idString = type + + ";group," + group; + super.id = DigestUtils.md5DigestAsHex(idString.getBytes()); + } +} + diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java index 9ef554821d2a..b3fe896ace24 100644 --- 
a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/processingaction/SingleUserReviewAction.java @@ -13,11 +13,15 @@ import java.util.List; import javax.servlet.http.HttpServletRequest; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.util.Util; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DCDate; -import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.workflow.WorkflowException; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.state.Step; import org.dspace.xmlworkflow.state.actions.ActionResult; @@ -25,7 +29,7 @@ /** * Processing class of an action where a single user has - * been assigned and he can either accept/reject the workflow item + * been assigned and they can either accept/reject the workflow item * or reject the task * * @author Bram De Schouwer (bram.deschouwer at dot com) @@ -34,39 +38,59 @@ * @author Mark Diggory (markd at atmire dot com) */ public class SingleUserReviewAction extends ProcessingAction { - - public static final int MAIN_PAGE = 0; - public static final int REJECT_PAGE = 1; - public static final int SUBMITTER_IS_DELETED_PAGE = 2; + private static final Logger log = LogManager.getLogger(SingleUserReviewAction.class); public static final int OUTCOME_REJECT = 1; - protected static final String SUBMIT_APPROVE = "submit_approve"; - protected static final String SUBMIT_REJECT = "submit_reject"; protected static final String SUBMIT_DECLINE_TASK = "submit_decline_task"; @Override public void activate(Context c, XmlWorkflowItem wfItem) { - + // empty } @Override public ActionResult execute(Context c, XmlWorkflowItem 
wfi, Step step, HttpServletRequest request) - throws SQLException, AuthorizeException, IOException { - int page = Util.getIntParameter(request, "page"); - - switch (page) { - case MAIN_PAGE: - return processMainPage(c, wfi, step, request); - case REJECT_PAGE: - return processRejectPage(c, wfi, step, request); - case SUBMITTER_IS_DELETED_PAGE: - return processSubmitterIsDeletedPage(c, wfi, request); + throws SQLException, AuthorizeException, IOException, WorkflowException { + if (!super.isOptionInParam(request)) { + return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); + } + switch (Util.getSubmitButton(request, SUBMIT_CANCEL)) { + case SUBMIT_APPROVE: + return processAccept(c, wfi); + case SUBMIT_REJECT: + return processReject(c, wfi, request); + case SUBMIT_DECLINE_TASK: + return processDecline(c, wfi); default: return new ActionResult(ActionResult.TYPE.TYPE_CANCEL); } } + /** + * Process {@link super#SUBMIT_REJECT} on this action, will either: + * - If submitter of item no longer exists => Permanently delete corresponding item (no wfi/wsi remaining) + * - Otherwise: reject item back to submission => becomes wsi of submitter again + */ + private ActionResult processReject(Context c, XmlWorkflowItem wfi, HttpServletRequest request) + throws SQLException, IOException, AuthorizeException { + if (wfi.getSubmitter() == null) { + // If the original submitter is no longer there, delete the task + return processDelete(c, wfi); + } else { + return super.processRejectPage(c, wfi, request); + } + } + + /** + * Accept the workflow item => last step in workflow so will be archived + * Info on step & reviewer will be added on metadata dc.description.provenance of resulting item + */ + public ActionResult processAccept(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { + super.addApprovedProvenance(c, wfi); + return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); + } + @Override public List getOptions() { List options = 
new ArrayList<>(); @@ -76,87 +100,29 @@ public List getOptions() { return options; } - public ActionResult processMainPage(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request) - throws SQLException, AuthorizeException { - if (request.getParameter(SUBMIT_APPROVE) != null) { - //Delete the tasks - addApprovedProvenance(c, wfi); - - return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, ActionResult.OUTCOME_COMPLETE); - } else if (request.getParameter(SUBMIT_REJECT) != null) { - // Make sure we indicate which page we want to process - if (wfi.getSubmitter() == null) { - request.setAttribute("page", SUBMITTER_IS_DELETED_PAGE); - } else { - request.setAttribute("page", REJECT_PAGE); - } - // We have pressed reject item, so take the user to a page where he can reject - return new ActionResult(ActionResult.TYPE.TYPE_PAGE); - } else if (request.getParameter(SUBMIT_DECLINE_TASK) != null) { - return new ActionResult(ActionResult.TYPE.TYPE_OUTCOME, OUTCOME_REJECT); - - } else { - //We pressed the leave button so return to our submissions page - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); - } - } - - private void addApprovedProvenance(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException { - //Add the provenance for the accept - String now = DCDate.getCurrent().toString(); - - // Get user's name + email address - String usersName = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .getEPersonName(c.getCurrentUser()); - - String provDescription = getProvenanceStartId() + " Approved for entry into archive by " - + usersName + " on " + now + " (GMT) "; - - // Add to item as a DC field - itemService.addMetadata(c, wfi.getItem(), MetadataSchemaEnum.DC.getName(), "description", "provenance", "en", - provDescription); - itemService.update(c, wfi.getItem()); - } - - public ActionResult processRejectPage(Context c, XmlWorkflowItem wfi, Step step, HttpServletRequest request) + /** + * Since original submitter no 
longer exists, workflow item is permanently deleted + */ + private ActionResult processDelete(Context c, XmlWorkflowItem wfi) throws SQLException, AuthorizeException, IOException { - if (request.getParameter("submit_reject") != null) { - String reason = request.getParameter("reason"); - if (reason == null || 0 == reason.trim().length()) { - request.setAttribute("page", REJECT_PAGE); - addErrorField(request, "reason"); - return new ActionResult(ActionResult.TYPE.TYPE_ERROR); - } - - //We have pressed reject, so remove the task the user has & put it back to a workspace item - XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .sendWorkflowItemBackSubmission(c, wfi, c.getCurrentUser(), - this.getProvenanceStartId(), reason); - - - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); - } else { - //Cancel, go back to the main task page - request.setAttribute("page", MAIN_PAGE); - - return new ActionResult(ActionResult.TYPE.TYPE_PAGE); - } + EPerson user = c.getCurrentUser(); + c.turnOffAuthorisationSystem(); + WorkspaceItem workspaceItem = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() + .abort(c, wfi, user); + ContentServiceFactory.getInstance().getWorkspaceItemService().deleteAll(c, workspaceItem); + c.restoreAuthSystemState(); + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); } - public ActionResult processSubmitterIsDeletedPage(Context c, XmlWorkflowItem wfi, HttpServletRequest request) - throws SQLException, AuthorizeException, IOException { - if (request.getParameter("submit_delete") != null) { - XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() - .deleteWorkflowByWorkflowItem(c, wfi, c.getCurrentUser()); - // Delete and send user back to myDspace page - return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); - } else if (request.getParameter("submit_keep_it") != null) { - // Do nothing, just send it back to myDspace page - return new 
ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); - } else { - //Cancel, go back to the main task page - request.setAttribute("page", MAIN_PAGE); - return new ActionResult(ActionResult.TYPE.TYPE_PAGE); - } + /** + * Selected reviewer declines to review task, then the workflow is aborted and restarted + */ + private ActionResult processDecline(Context c, XmlWorkflowItem wfi) + throws SQLException, IOException, AuthorizeException, WorkflowException { + c.turnOffAuthorisationSystem(); + xmlWorkflowService.restartWorkflow(c, wfi, c.getCurrentUser(), this.getProvenanceStartId()); + c.restoreAuthSystemState(); + return new ActionResult(ActionResult.TYPE.TYPE_SUBMISSION_PAGE); } + } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java index 3c8d85997a5c..0cd82fe77084 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AssignOriginalSubmitterAction.java @@ -17,7 +17,7 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.workflow.WorkflowException; import org.dspace.xmlworkflow.RoleMembers; @@ -87,7 +87,7 @@ public void alertUsersOnActivation(Context c, XmlWorkflowItem wfi, RoleMembers r xmlWorkflowService.getMyDSpaceLink() ); } catch (MessagingException e) { - log.info(LogManager.getHeader(c, "error emailing user(s) for claimed task", + log.info(LogHelper.getHeader(c, "error emailing user(s) for claimed task", "step: " + getParent().getStep().getId() + " workflowitem: " + wfi.getID())); } } @@ -136,7 +136,7 @@ public List getOptions() { protected void createTaskForEPerson(Context c, 
XmlWorkflowItem wfi, Step step, WorkflowActionConfig actionConfig, EPerson user) throws SQLException, AuthorizeException, IOException { if (claimedTaskService.find(c, wfi, step.getId(), actionConfig.getId()) != null) { - workflowRequirementsService.addClaimedUser(c, wfi, step, c.getCurrentUser()); + workflowRequirementsService.addClaimedUser(c, wfi, step, user); XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() .createOwnedTask(c, wfi, step, actionConfig, user); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AutoAssignAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AutoAssignAction.java index 3f87c26029d6..401a7c506b98 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AutoAssignAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/AutoAssignAction.java @@ -16,7 +16,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.eperson.service.GroupService; import org.dspace.xmlworkflow.Role; @@ -80,24 +80,28 @@ public ActionResult execute(Context c, XmlWorkflowItem wfi, Step step, HttpServl } //Delete our workflow item role since the users have been assigned workflowItemRoleService.delete(c, workflowItemRole); + if (role.isDeleteTemporaryGroup() && workflowItemRole.getGroup() != null) { + // Delete temporary groups created after members have workflow task assigned + groupService.delete(c, workflowItemRole.getGroup()); + } } } else { - log.warn(LogManager.getHeader(c, "Error while executing auto assign action", + log.warn(LogHelper.getHeader(c, "Error while executing auto assign action", "No valid next action. 
Workflow item:" + wfi.getID())); } } } catch (SQLException e) { - log.error(LogManager.getHeader(c, "Error while executing auto assign action", + log.error(LogHelper.getHeader(c, "Error while executing auto assign action", "Workflow item: " + wfi.getID() + " step :" + getParent().getStep().getId()), e); throw e; } catch (AuthorizeException e) { - log.error(LogManager.getHeader(c, "Error while executing auto assign action", + log.error(LogHelper.getHeader(c, "Error while executing auto assign action", "Workflow item: " + wfi.getID() + " step :" + getParent().getStep().getId()), e); throw e; } catch (IOException e) { - log.error(LogManager.getHeader(c, "Error while executing auto assign action", + log.error(LogHelper.getHeader(c, "Error while executing auto assign action", "Workflow item: " + wfi.getID() + " step :" + getParent().getStep().getId()), e); throw e; @@ -127,7 +131,7 @@ public List getOptions() { protected void createTaskForEPerson(Context c, XmlWorkflowItem wfi, Step step, WorkflowActionConfig actionConfig, EPerson user) throws SQLException, AuthorizeException, IOException { if (claimedTaskService.find(c, wfi, step.getId(), actionConfig.getId()) != null) { - workflowRequirementsService.addClaimedUser(c, wfi, step, c.getCurrentUser()); + workflowRequirementsService.addClaimedUser(c, wfi, step, user); XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService() .createOwnedTask(c, wfi, step, actionConfig, user); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java index 744bf69136e3..21fcf6f30996 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/actions/userassignment/ClaimAction.java @@ -16,7 +16,7 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.core.Context; -import 
org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -54,7 +54,7 @@ public void activate(Context context, XmlWorkflowItem wfItem) throws SQLExceptio .createPoolTasks(context, wfItem, allroleMembers, owningStep, getParent()); alertUsersOnActivation(context, wfItem, allroleMembers); } else { - log.info(LogManager.getHeader(context, "warning while activating claim action", + log.info(LogHelper.getHeader(context, "warning while activating claim action", "No group or person was found for the following roleid: " + getParent() .getStep().getRole().getId())); } @@ -96,7 +96,7 @@ public void alertUsersOnActivation(Context c, XmlWorkflowItem wfi, RoleMembers r xmlWorkflowService.getMyDSpaceLink() ); } catch (MessagingException e) { - log.info(LogManager.getHeader(c, "error emailing user(s) for claimed task", + log.info(LogHelper.getHeader(c, "error emailing user(s) for claimed task", "step: " + getParent().getStep().getId() + " workflowitem: " + wfi.getID())); } } @@ -113,7 +113,7 @@ public void regenerateTasks(Context c, XmlWorkflowItem wfi, RoleMembers roleMemb } } else { - log.info(LogManager.getHeader(c, "warning while activating claim action", + log.info(LogHelper.getHeader(c, "warning while activating claim action", "No group or person was found for the following roleid: " + getParent() .getStep().getId())); } @@ -138,6 +138,10 @@ public boolean isValidUserSelection(Context context, XmlWorkflowItem wfi, boolea RoleMembers roleMembers = role.getMembers(context, wfi); ArrayList epersons = roleMembers.getAllUniqueMembers(context); + if (epersons.isEmpty() || step.getRequiredUsers() > epersons.size()) { + log.warn(String.format("There must be at least %s ePerson(s) in the group", + step.getRequiredUsers())); + } return !(epersons.isEmpty() || step.getRequiredUsers() > epersons.size()); } else { // We don't have a role 
and do have a UI so throw a workflow exception diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/ClaimedTask.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/ClaimedTask.java index 8882055f824b..8f4794cb3b45 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/ClaimedTask.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/ClaimedTask.java @@ -71,6 +71,7 @@ protected ClaimedTask() { } + @Override public Integer getID() { return id; } @@ -91,24 +92,24 @@ public XmlWorkflowItem getWorkflowItem() { return workflowItem; } - public void setActionID(String actionID) { - this.actionId = actionID; + public void setActionID(String actionId) { + this.actionId = actionId; } public String getActionID() { return actionId; } - public void setStepID(String stepID) { - this.stepId = stepID; + public void setStepID(String stepId) { + this.stepId = stepId; } public String getStepID() { return stepId; } - public void setWorkflowID(String workflowID) { - this.workflowId = workflowID; + public void setWorkflowID(String workflowId) { + this.workflowId = workflowId; } public String getWorkflowID() { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/CollectionRole.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/CollectionRole.java index 114db170874c..c9a7995e0390 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/CollectionRole.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/CollectionRole.java @@ -91,6 +91,7 @@ public Group getGroup() throws SQLException { return group; } + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/InProgressUser.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/InProgressUser.java index 5cd714345e90..efbd26bde5f5 100644 --- 
a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/InProgressUser.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/InProgressUser.java @@ -59,6 +59,7 @@ protected InProgressUser() { } + @Override public Integer getID() { return id; } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTask.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTask.java index 2a87de51cb6c..9cfc9ea06826 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTask.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTask.java @@ -78,12 +78,13 @@ protected PoolTask() { } + @Override public Integer getID() { return id; } - public void setEperson(EPerson eperson) { - this.ePerson = eperson; + public void setEperson(EPerson ePerson) { + this.ePerson = ePerson; } public EPerson getEperson() { @@ -114,16 +115,16 @@ public XmlWorkflowItem getWorkflowItem() { return this.workflowItem; } - public void setStepID(String stepID) { - this.stepId = stepID; + public void setStepID(String stepId) { + this.stepId = stepId; } public String getStepID() { return stepId; } - public void setActionID(String actionID) { - this.actionId = actionID; + public void setActionID(String actionId) { + this.actionId = actionId; } public String getActionID() { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java index f64f1b3942e1..fb673725e181 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/PoolTaskServiceImpl.java @@ -92,7 +92,7 @@ public PoolTask findByWorkflowIdAndEPerson(Context context, XmlWorkflowItem work return poolTask; } else { //If the user has a is processing or has finished the step for a 
workflowitem, there is no need to look - // for pooltasks for one of his + // for pooltasks for one of their //groups because the user already has the task claimed if (inProgressUserService.findByWorkflowItemAndEPerson(context, workflowItem, ePerson) != null) { return null; diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRole.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRole.java index 9a7e5a034c4a..cc6df9731baa 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRole.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/WorkflowItemRole.java @@ -69,7 +69,7 @@ protected WorkflowItemRole() { } - + @Override public Integer getID() { return id; } @@ -90,8 +90,8 @@ public XmlWorkflowItem getWorkflowItem() { return workflowItem; } - public void setEPerson(EPerson eperson) { - this.ePerson = eperson; + public void setEPerson(EPerson ePerson) { + this.ePerson = ePerson; } public EPerson getEPerson() throws SQLException { diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/XmlWorkflowItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/XmlWorkflowItemServiceImpl.java index 010c310ba64b..181bb9985fe2 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/XmlWorkflowItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/XmlWorkflowItemServiceImpl.java @@ -18,7 +18,7 @@ import org.dspace.content.Item; import org.dspace.content.service.ItemService; import org.dspace.core.Context; -import org.dspace.core.LogManager; +import org.dspace.core.LogHelper; import org.dspace.eperson.EPerson; import org.dspace.xmlworkflow.service.WorkflowRequirementsService; import org.dspace.xmlworkflow.storedcomponents.dao.XmlWorkflowItemDAO; @@ -77,12 +77,12 @@ public XmlWorkflowItem find(Context context, int id) throws SQLException { if 
(workflowItem == null) { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_workflow_item", + log.debug(LogHelper.getHeader(context, "find_workflow_item", "not_found,workflowitem_id=" + id)); } } else { if (log.isDebugEnabled()) { - log.debug(LogManager.getHeader(context, "find_workflow_item", + log.debug(LogHelper.getHeader(context, "find_workflow_item", "workflowitem_id=" + id)); } } @@ -176,7 +176,7 @@ public XmlWorkflowItem findByItem(Context context, Item item) throws SQLExceptio @Override public void update(Context context, XmlWorkflowItem workflowItem) throws SQLException, AuthorizeException { // FIXME check auth - log.info(LogManager.getHeader(context, "update_workflow_item", + log.info(LogHelper.getHeader(context, "update_workflow_item", "workflowitem_id=" + workflowItem.getID())); // Update the item diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/ClaimedTaskDAOImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/ClaimedTaskDAOImpl.java index bb5a167237fd..956a4648c53a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/ClaimedTaskDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/ClaimedTaskDAOImpl.java @@ -23,7 +23,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the ClaimedTask object. - * This class is responsible for all database calls for the ClaimedTask object and is autowired by spring + * This class is responsible for all database calls for the ClaimedTask object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -56,7 +56,7 @@ public ClaimedTask findByWorkflowItemAndEPerson(Context context, XmlWorkflowItem criteriaBuilder.equal(claimedTaskRoot.get(ClaimedTask_.owner), ePerson) ) ); - return uniqueResult(context, criteriaQuery, false, ClaimedTask.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, ClaimedTask.class); } @@ -101,7 +101,7 @@ public ClaimedTask findByEPersonAndWorkflowItemAndStepIdAndActionId(Context cont criteriaBuilder.equal(claimedTaskRoot.get(ClaimedTask_.actionId), actionID) ) ); - return uniqueResult(context, criteriaQuery, false, ClaimedTask.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, ClaimedTask.class); } @Override diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/CollectionRoleDAOImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/CollectionRoleDAOImpl.java index 429bf9647138..b3cd32c74f0d 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/CollectionRoleDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/CollectionRoleDAOImpl.java @@ -24,7 +24,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the CollectionRole object. - * This class is responsible for all database calls for the CollectionRole object and is autowired by spring + * This class is responsible for all database calls for the CollectionRole object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -66,7 +66,7 @@ public CollectionRole findByCollectionAndRole(Context context, Collection collec criteriaBuilder.equal(collectionRoleRoot.get(CollectionRole_.roleId), role) ) ); - return uniqueResult(context, criteriaQuery, false, CollectionRole.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, CollectionRole.class); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/InProgressUserDAOImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/InProgressUserDAOImpl.java index cdba1600a8b5..783d403c054a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/InProgressUserDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/InProgressUserDAOImpl.java @@ -23,7 +23,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the InProgressUser object. - * This class is responsible for all database calls for the InProgressUser object and is autowired by spring + * This class is responsible for all database calls for the InProgressUser object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -46,7 +46,7 @@ public InProgressUser findByWorkflowItemAndEPerson(Context context, XmlWorkflowI criteriaBuilder.equal(inProgressUserRoot.get(InProgressUser_.ePerson), ePerson) ) ); - return uniqueResult(context, criteriaQuery, false, InProgressUser.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, InProgressUser.class); } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/PoolTaskDAOImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/PoolTaskDAOImpl.java index b38041da395b..0857a325b5df 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/PoolTaskDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/PoolTaskDAOImpl.java @@ -24,7 +24,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the PoolTask object. - * This class is responsible for all database calls for the PoolTask object and is autowired by spring + * This class is responsible for all database calls for the PoolTask object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -77,7 +77,7 @@ public PoolTask findByWorkflowItemAndEPerson(Context context, XmlWorkflowItem wo criteriaBuilder.equal(poolTaskRoot.get(PoolTask_.ePerson), ePerson) ) ); - return uniqueResult(context, criteriaQuery, false, PoolTask.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, PoolTask.class); } @Override @@ -92,6 +92,6 @@ public PoolTask findByWorkflowItemAndGroup(Context context, Group group, XmlWork criteriaBuilder.equal(poolTaskRoot.get(PoolTask_.group), group) ) ); - return uniqueResult(context, criteriaQuery, false, PoolTask.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, PoolTask.class); } } diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/XmlWorkflowItemDAOImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/XmlWorkflowItemDAOImpl.java index 51728af7a49c..659a2123d90a 100644 --- a/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/XmlWorkflowItemDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/storedcomponents/dao/impl/XmlWorkflowItemDAOImpl.java @@ -26,7 +26,7 @@ /** * Hibernate implementation of the Database Access Object interface class for the XmlWorkflowItem object. - * This class is responsible for all database calls for the XmlWorkflowItem object and is autowired by spring + * This class is responsible for all database calls for the XmlWorkflowItem object and is autowired by Spring. * This class should never be accessed directly. 
* * @author kevinvandevelde at atmire.com @@ -132,6 +132,6 @@ public XmlWorkflowItem findByItem(Context context, Item item) throws SQLExceptio Root xmlWorkflowItemRoot = criteriaQuery.from(XmlWorkflowItem.class); criteriaQuery.select(xmlWorkflowItemRoot); criteriaQuery.where(criteriaBuilder.equal(xmlWorkflowItemRoot.get(XmlWorkflowItem_.item), item)); - return uniqueResult(context, criteriaQuery, false, XmlWorkflowItem.class, -1, -1); + return uniqueResult(context, criteriaQuery, false, XmlWorkflowItem.class); } } diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index c7dec3665536..efbbeedde053 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -6,6 +6,9 @@ # http://www.dspace.org/license/ # +iiif.canvas.default-naming = Page +iiif.toc.root-label = Table of Contents + itemlist.dc.contributor.* = Author(s) itemlist.dc.contributor.author = Author(s) itemlist.dc.creator = Author(s) @@ -38,8 +41,17 @@ metadata.dc.relation.ispartofseries = Series/Report no. metadata.dc.subject = Keywords metadata.dc.title = Title metadata.dc.title.alternative = Other Titles +metadata.bitstream.dc.title = File name +metadata.bitstream.dc.description = Description +metadata.bitstream.iiif.image.width = Image Width (px) +metadata.bitstream.iiif.image.height= Image Height (px) +metadata.bitstream.iiif-virtual.format = Format +metadata.bitstream.iiif-virtual.mimetype = Mime Type +metadata.bitstream.iiif-virtual.bytes = File size +metadata.bitstream.iiif-virtual.checksum = Checksum org.dspace.app.itemexport.no-result = The DSpaceObject that you specified has no items. 
+org.dspace.app.util.SyndicationFeed.no-description = No Description org.dspace.checker.ResultsLogger.bitstream-format = Bitstream format org.dspace.checker.ResultsLogger.bitstream-found = Bitstream found org.dspace.checker.ResultsLogger.bitstream-id = Bitstream ID @@ -108,3 +120,7 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = Refused workflow group {1}. Delete the tasks and group first if you want to remove this user. org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eperson.firstname and eperson.lastname values need to be filled in org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided +org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks +org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! 
+org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl index f32942a302a2..d9f6cd361434 100644 --- a/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/CreativeCommons.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - @@ -47,4 +47,4 @@ - \ No newline at end of file + diff --git a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl index 84c62158fe75..d9a9745a1b10 100644 --- a/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl +++ b/dspace-api/src/main/resources/org/dspace/license/LicenseCleanup.xsl @@ -8,7 +8,7 @@ http://www.dspace.org/license/ --> - - \ No newline at end of file + diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql deleted file mode 100644 index 7907fccc00ae..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- Copyright 2010-2017 Boxfuse GmbH --- --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
--- See the License for the specific language governing permissions and --- limitations under the License. --- ------------------ --- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql --- --- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------- - -DROP INDEX "${schema}"."${table}_vr_idx"; -DROP INDEX "${schema}"."${table}_ir_idx"; -ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; -ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; -ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; -ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); -UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql index 7548fa4c6acb..edebe6e087fb 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -15,7 +15,7 @@ -- ----------------- -- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/postgresql/upgradeMetaDataTable.sql -- -- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------ diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md index 8088c6ccca62..87e114ca53a5 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md @@ -4,33 +4,25 @@ in Production. Instead, DSpace uses the H2 Database to perform Unit Testing during development. -By default, the DSpace Unit Testing environment configures H2 to run in -"Oracle Mode" and initializes the H2 database using the scripts in this directory. -These database migrations are automatically called by [Flyway](http://flywaydb.org/) -when the `DatabaseManager` initializes itself (see `initializeDatabase()` method). - -The H2 migrations in this directory are *based on* the Oracle Migrations, but -with some modifications in order to be valid in H2. - -## Oracle vs H2 script differences +By default, the DSpace Unit Testing environment configures H2 to run in memory +and initializes the H2 database using the scripts in this directory. See +`[src]/dspace-api/src/test/data/dspaceFolder/config/local.cfg`. -One of the primary differences between the Oracle scripts and these H2 ones -is in the syntax of the `ALTER TABLE` command. Unfortunately, H2's syntax for -that command differs greatly from Oracle (and PostgreSQL as well). +These database migrations are automatically called by [Flyway](http://flywaydb.org/) +in `DatabaseUtils`. -Most of the remainder of the scripts contain the exact Oracle syntax (which is -usually valid in H2). But, to you can always `diff` scripts of the same name -for further syntax differences. +The H2 migrations in this directory all use H2's grammar/syntax. +For additional info see the [H2 SQL Grammar](https://www.h2database.com/html/grammar.html). -For additional info see the [H2 SQL Grammar](http://www.h2database.com/html/grammar.html). 
## More Information on Flyway The SQL scripts in this directory are H2-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql index e00a6516261c..62d12fe5ce25 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V1.4__Upgrade_to_DSpace_1.4_schema.sql @@ -245,13 +245,13 @@ insert into most_recent_checksum ) select bitstream.bitstream_id, - '1', + true, CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'), FORMATDATETIME(NOW(),'DD-MM-RRRR HH24:MI:SS'), CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' + true from bitstream; -- Update all the deleted checksums @@ -263,7 +263,7 @@ update most_recent_checksum set to_be_processed = 0 where most_recent_checksum.bitstream_id in ( select bitstream_id -from bitstream where deleted = '1' ); +from bitstream where deleted = true ); -- this will insert into history table -- for the initial start diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql index 87551bdf4e9b..cd908279f158 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql @@ -36,7 +36,7 @@ alter table metadatavalue alter column resource_id set not null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, @@ -47,7 +47,7 @@ FROM community where not introductory_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, @@ -58,7 +58,7 @@ FROM community where not short_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, 
+NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, @@ -69,7 +69,7 @@ FROM community where not side_bar_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, @@ -80,7 +80,7 @@ FROM community where not copyright_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, community_id AS resource_id, 4 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -104,7 +104,7 @@ alter table community drop column name; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where 
short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, @@ -115,7 +115,7 @@ FROM collection where not introductory_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, @@ -126,7 +126,7 @@ FROM collection where not short_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, @@ -137,7 +137,7 @@ FROM collection where not side_bar_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, @@ -148,7 +148,7 @@ FROM collection where not copyright_text is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, 
resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -159,7 +159,7 @@ FROM collection where not name is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, @@ -170,7 +170,7 @@ FROM collection where not provenance_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, collection_id AS resource_id, 3 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, @@ -194,7 +194,7 @@ alter table collection drop column provenance_description; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bundle_id AS resource_id, 1 AS resource_type_id, (select 
metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -214,7 +214,7 @@ alter table bundle drop column name; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -225,7 +225,7 @@ FROM bitstream where not name is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, @@ -236,7 +236,7 @@ FROM bitstream where not description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, @@ -247,7 +247,7 @@ FROM bitstream where not 
user_format_description is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, bitstream_id AS resource_id, 0 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, @@ -269,7 +269,7 @@ alter table bitstream drop column source; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_group_id AS resource_id, 6 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, @@ -288,7 +288,7 @@ alter table epersongroup drop column name; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, @@ -299,7 +299,7 @@ FROM eperson where not firstname is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as 
metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, @@ -310,7 +310,7 @@ FROM eperson where not lastname is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, @@ -321,7 +321,7 @@ FROM eperson where not phone is null; INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) SELECT -metadatavalue_seq.nextval as metadata_value_id, +NEXT VALUE FOR metadatavalue_seq as metadata_value_id, eperson_id AS resource_id, 7 AS resource_type_id, (select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql index 2e09b807de3b..0bd68c520193 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V5.6_2016.08.23__DS-3097.sql @@ -14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and 
resource_type_i SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( SELECT item2bundle.bundle_id FROM item2bundle LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql index 1c98ceef2a97..1ee23246eaae 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql @@ -17,7 +17,7 @@ INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) SELECT -resourcepolicy_seq.nextval AS policy_id, +NEXT VALUE FOR resourcepolicy_seq AS policy_id, resource_type_id, resource_id, -- Insert the Constants.DELETE action diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql index e1220c8c7cce..5bb59970c55b 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V6.0_2016.08.23__DS-3097.sql @@ 
-14,11 +14,11 @@ UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object i SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( SELECT item2bundle.bundle_id FROM item2bundle LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 + WHERE item.withdrawn = true ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql new file mode 100644 index 000000000000..5a6abda04101 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql @@ -0,0 +1,28 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
+-- http://flywaydb.org/ +-- =============================================================== + +------------------------------------------------------------------------------------------------------- +------------------------------------------------------------------------------------------------------- +UPDATE metadatavalue SET dspace_object_id = (SELECT uuid + FROM collection + WHERE template_item_id = dspace_object_id) +WHERE dspace_object_id IN (SELECT template_item_id + FROM Collection) + AND metadata_field_id + IN (SELECT metadata_field_id + FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr + ON mfr.metadata_schema_id = msr.metadata_schema_id + WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql new file mode 100644 index 000000000000..ae8f1e7ef5d2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql @@ -0,0 +1,15 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------------- +---- ALTER table collection +------------------------------------------------------------------------------------- + +ALTER TABLE collection DROP COLUMN workflow_step_1; +ALTER TABLE collection DROP COLUMN workflow_step_2; +ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql new file mode 100644 index 000000000000..7506433cddbc --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql @@ -0,0 +1,29 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +---------------------------------------------------- +-- Make sure the metadatavalue.place column starts at 0 instead of 1 +---------------------------------------------------- + +CREATE LOCAL TEMPORARY TABLE mdv_minplace ( + dspace_object_id UUID NOT NULL, + metadata_field_id INT NOT NULL, + minplace INT NOT NULL +); + +INSERT INTO mdv_minplace +SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace +FROM metadatavalue +GROUP BY dspace_object_id, metadata_field_id; + +UPDATE metadatavalue AS mdv +SET place = mdv.place - ( + SELECT minplace FROM mdv_minplace AS mp + WHERE mp.dspace_object_id = mdv.dspace_object_id + AND mp.metadata_field_id = mdv.metadata_field_id +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.04.29__orcid_queue_and_history.sql new file mode 100644 index 000000000000..e76926480a80 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.04.29__orcid_queue_and_history.sql @@ -0,0 +1,54 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the 
root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create tables for ORCID Queue and History +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_queue_id_seq; + +CREATE TABLE orcid_queue +( + id INTEGER NOT NULL, + owner_id UUID NOT NULL, + entity_id UUID, + put_code VARCHAR(255), + record_type VARCHAR(255), + description VARCHAR(255), + operation VARCHAR(255), + metadata CLOB, + attempts INTEGER, + CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), + CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); + + +CREATE SEQUENCE orcid_history_id_seq; + +CREATE TABLE orcid_history +( + id INTEGER NOT NULL, + owner_id UUID NOT NULL, + entity_id UUID, + put_code VARCHAR(255), + timestamp_last_attempt TIMESTAMP, + response_message CLOB, + status INTEGER, + metadata CLOB, + operation VARCHAR(255), + record_type VARCHAR(255), + description VARCHAR(255), + CONSTRAINT orcid_history_pkey PRIMARY KEY (id), + CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.05.16__Orcid_token_table.sql new file mode 100644 index 000000000000..8bda3a8acd5e --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.05.16__Orcid_token_table.sql @@ -0,0 +1,24 
@@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create table for ORCID access tokens +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_token_id_seq; + +CREATE TABLE orcid_token +( + id INTEGER NOT NULL, + eperson_id UUID NOT NULL UNIQUE, + profile_item_id UUID, + access_token VARCHAR(100) NOT NULL, + CONSTRAINT orcid_token_pkey PRIMARY KEY (id), + CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), + CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.16__process_to_group.sql new file mode 100644 index 000000000000..0e7d417ae52d --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.16__process_to_group.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store Groups related to a Process on its creation +------------------------------------------------------------------------------- + +CREATE TABLE Process2Group +( + process_id INTEGER REFERENCES Process(process_id), + group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, + CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) +); \ No newline at end of 
file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql new file mode 100644 index 000000000000..7bf3948d3a63 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) +ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql new file mode 100644 index 000000000000..dc187d3c2784 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql @@ -0,0 +1,44 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- ADD table subscription_parameter 
+----------------------------------------------------------------------------------- + + +CREATE SEQUENCE if NOT EXISTS subscription_parameter_seq; +------------------------------------------------------- +-- Create the subscription_parameter table +------------------------------------------------------- + +CREATE TABLE if NOT EXISTS subscription_parameter +( + subscription_parameter_id INTEGER NOT NULL, + name CHARACTER VARYING(255), + value CHARACTER VARYING(255), + subscription_id INTEGER NOT NULL, + CONSTRAINT subscription_parameter_pkey PRIMARY KEY (subscription_parameter_id), + CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) REFERENCES subscription (subscription_id) ON DELETE CASCADE +); + +-- +ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; +-- +ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); +-- +ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; +ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); +-- -- +UPDATE subscription set dspace_object_id = collection_id , type = 'content'; +-- +ALTER TABLE subscription DROP CONSTRAINT IF EXISTS Subscription_collection_id_fk; +-- +ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; + + + diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.06__index_action_resource_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.06__index_action_resource_policy.sql new file mode 100644 index 000000000000..696e84433dcd --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.06__index_action_resource_policy.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and 
available online at +-- +-- http://www.dspace.org/license/ +-- + +CREATE INDEX resourcepolicy_action_idx ON resourcepolicy(action_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.09__Supervision_Orders_table.sql new file mode 100644 index 000000000000..33d3eb5c82c8 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.09__Supervision_Orders_table.sql @@ -0,0 +1,20 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store supervision orders +------------------------------------------------------------------------------- + +CREATE TABLE supervision_orders +( + id INTEGER PRIMARY KEY, + item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, + eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE +); + +CREATE SEQUENCE supervision_orders_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.15__system_wide_alerts.sql new file mode 100644 index 000000000000..9d13138fdada --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.5_2022.12.15__system_wide_alerts.sql @@ -0,0 +1,22 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + 
+----------------------------------------------------------------------------------- +-- Create table for System wide alerts +----------------------------------------------------------------------------------- + +CREATE SEQUENCE alert_id_seq; + +CREATE TABLE systemwidealert +( + alert_id INTEGER NOT NULL PRIMARY KEY, + message VARCHAR(512), + allow_sessions VARCHAR(64), + countdown_to TIMESTAMP, + active BOOLEAN +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql new file mode 100644 index 000000000000..47cd157336af --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -0,0 +1,13 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Drop the 'history_seq' sequence (related table deleted at Dspace-1.5) +----------------------------------------------------------------------------------- + +DROP SEQUENCE history_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql new file mode 100644 index 000000000000..8aec44a7f6f2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the 
LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 +----------------------------------------------------------------------------------- + +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..7641eb9fc2c0 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description SET DATA TYPE CLOB; +ALTER TABLE orcid_queue ALTER COLUMN description SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..1028ba370c47 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and 
copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters SET DATA TYPE CLOB; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V8.0_2023.08.07__qaevent_processed.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V8.0_2023.08.07__qaevent_processed.sql new file mode 100644 index 000000000000..467de85f8505 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/V8.0_2023.08.07__qaevent_processed.sql @@ -0,0 +1,16 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +CREATE TABLE qaevent_processed ( + qaevent_id VARCHAR(255) NOT NULL, + qaevent_timestamp TIMESTAMP NULL, + eperson_uuid UUID NULL REFERENCES eperson(uuid), + item_uuid uuid NOT NULL REFERENCES item(uuid) +); + +CREATE INDEX item_uuid_idx ON qaevent_processed(item_uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql deleted file mode 100644 index fff1fe154f57..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql +++ /dev/null @@ -1,90 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING 
WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create COMMUNITY handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - -------------------------------------------------------------- --- This will create COLLECTION 
handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md deleted file mode 100644 index 229b70ec37da..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# Oracle Flyway Database Migrations (i.e. 
Upgrades) - -The SQL scripts in this directory are Oracle-specific database migrations. They are -used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). -As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using -and then executes the appropriate upgrade script(s) to bring it up to the latest -version. - -If any failures occur, Flyway will "rollback" the upgrade script which resulted -in an error and log the issue in the DSpace log file at `[dspace]/log/dspace.log.[date]` - -**WARNING:** IT IS NOT RECOMMENDED TO RUN THESE SCRIPTS MANUALLY. If you do so, -Flyway will may throw failures the next time you startup DSpace, as Flyway will -not realize you manually ran one or more scripts. - -Please see the Flyway Documentation for more information: http://flywaydb.org/ - -## Oracle Porting Notes for the Curious - -Oracle is missing quite a number of cool features found in Postgres, so -workarounds had to be found, most of which are hidden behind tests in -DatabaseManager. If Oracle is your DBMS, the workarounds are activated: - -Oracle doesn't like ';' characters in JDBC SQL - they have all been removed -from the DSpace source, including code in the .sql file reader to strip ;'s. - -browse code - LIMIT and OFFSET is used to limit browse results, and an -Oracle-hack is used to limit the result set to a given size - -Oracle has no boolean data type, so a new schema file was created that -uses NUMBER(1) (AKA 'integers') and code is inserted everywhere to use 0 for -false and 1 for true if DSpace is using Oracle. - -Oracle doesn't have a TEXT data type either, so TEXT columns are defined -as VARCHAR2 in the Oracle-specific schema. 
- -Oracle doesn't allow dynamic naming for objects, so our cute trick to -derive the name of the sequence by appending _seq to the table name -in a function doesn't work in Oracle - workaround is to insert Oracle -code to generate the name of the sequence and then place that into -our SQL calls to generate a new ID. - -Oracle doesn't let you directly set the value of sequences, so -update-sequences.sql is forced to use a special script sequpdate.sql -to update the sequences. - -Bitstream had a column 'size' which is a reserved word in Oracle, -so this had to be changed to 'size_bytes' with corresponding code changes. - -VARCHAR2 has a limit of 4000 characters, so DSpace text data is limited to 4k. -Going to the CLOB data type can get around that, but seemed like too much effort -for now. Note that with UTF-8 encoding that 4k could translate to 1300 -characters worst-case (every character taking up 3 bytes is the worst case -scenario.) - -### UPDATE 5 April 2007 - -CLOBs are now used as follows: -MetadataValue:text_value -Community:introductory_text -Community:copyright_text -Collection:introductory_text -Collection:license -Collection:copyright_text - -DatabaseManager had to have some of the type checking changed, because Oracle's -JDBC driver is reporting INTEGERS as type DECIMAL. - -Oracle doesn't like it when you reference table names in lower case when -getting JDBC metadata for the tables, so they are converted in TableRow -to upper case. - -### UPDATE 27 November 2012 - -Oracle complains with ORA-01408 if you attempt to create an index on a column which -has already had the UNIQUE contraint added (such an index is implicit in maintaining the uniqueness -of the column). See [DS-1370](https://jira.duraspace.org/browse/DS-1370) for details. 
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql deleted file mode 100644 index 157274e05d66..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql +++ /dev/null @@ -1,550 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE bitstreamformatregistry_seq; -CREATE SEQUENCE fileextension_seq; -CREATE SEQUENCE bitstream_seq; -CREATE SEQUENCE eperson_seq; --- start group sequence at 0, since Anonymous group = 0 -CREATE SEQUENCE epersongroup_seq MINVALUE 0 START WITH 0; -CREATE SEQUENCE item_seq; -CREATE SEQUENCE bundle_seq; -CREATE SEQUENCE item2bundle_seq; -CREATE SEQUENCE bundle2bitstream_seq; -CREATE SEQUENCE dctyperegistry_seq; -CREATE SEQUENCE dcvalue_seq; -CREATE SEQUENCE community_seq; -CREATE SEQUENCE collection_seq; -CREATE SEQUENCE community2community_seq; -CREATE SEQUENCE community2collection_seq; -CREATE SEQUENCE collection2item_seq; -CREATE SEQUENCE resourcepolicy_seq; -CREATE SEQUENCE epersongroup2eperson_seq; -CREATE SEQUENCE handle_seq; -CREATE SEQUENCE workspaceitem_seq; -CREATE SEQUENCE workflowitem_seq; -CREATE SEQUENCE tasklistitem_seq; -CREATE SEQUENCE registrationdata_seq; -CREATE 
SEQUENCE subscription_seq; -CREATE SEQUENCE history_seq; -CREATE SEQUENCE historystate_seq; -CREATE SEQUENCE communities2item_seq; -CREATE SEQUENCE itemsbyauthor_seq; -CREATE SEQUENCE itemsbytitle_seq; -CREATE SEQUENCE itemsbydate_seq; -CREATE SEQUENCE itemsbydateaccessioned_seq; - - -------------------------------------------------------- --- BitstreamFormatRegistry table -------------------------------------------------------- -CREATE TABLE BitstreamFormatRegistry -( - bitstream_format_id INTEGER PRIMARY KEY, - mimetype VARCHAR2(48), - short_description VARCHAR2(128) UNIQUE, - description VARCHAR2(2000), - support_level INTEGER, - -- Identifies internal types - internal NUMBER(1) -); - -------------------------------------------------------- --- FileExtension table -------------------------------------------------------- -CREATE TABLE FileExtension -( - file_extension_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - extension VARCHAR2(16) -); - -------------------------------------------------------- --- Bitstream table -------------------------------------------------------- -CREATE TABLE Bitstream -( - bitstream_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - name VARCHAR2(256), - size_bytes INTEGER, - checksum VARCHAR2(64), - checksum_algorithm VARCHAR2(32), - description VARCHAR2(2000), - user_format_description VARCHAR2(2000), - source VARCHAR2(256), - internal_id VARCHAR2(256), - deleted NUMBER(1), - store_number INTEGER, - sequence_id INTEGER -); - -------------------------------------------------------- --- EPerson table -------------------------------------------------------- -CREATE TABLE EPerson -( - eperson_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - password VARCHAR2(64), - firstname VARCHAR2(64), - lastname VARCHAR2(64), - can_log_in NUMBER(1), - require_certificate NUMBER(1), - self_registered NUMBER(1), - 
last_active TIMESTAMP, - sub_frequency INTEGER, - phone VARCHAR2(32) -); - -------------------------------------------------------- --- EPersonGroup table -------------------------------------------------------- -CREATE TABLE EPersonGroup -( - eperson_group_id INTEGER PRIMARY KEY, - name VARCHAR2(256) UNIQUE -); - -------------------------------------------------------- --- Item table -------------------------------------------------------- -CREATE TABLE Item -( - item_id INTEGER PRIMARY KEY, - submitter_id INTEGER REFERENCES EPerson(eperson_id), - in_archive NUMBER(1), - withdrawn NUMBER(1), - last_modified TIMESTAMP, - owning_collection INTEGER -); - -------------------------------------------------------- --- Bundle table -------------------------------------------------------- -CREATE TABLE Bundle -( - bundle_id INTEGER PRIMARY KEY, - mets_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT - primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - -------------------------------------------------------- --- Item2Bundle table -------------------------------------------------------- -CREATE TABLE Item2Bundle -( - id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - bundle_id INTEGER REFERENCES Bundle(bundle_id) -); - --- index by item_id -CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id); - -------------------------------------------------------- --- Bundle2Bitstream table -------------------------------------------------------- -CREATE TABLE Bundle2Bitstream -( - id INTEGER PRIMARY KEY, - bundle_id INTEGER REFERENCES Bundle(bundle_id), - bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - --- index by bundle_id -CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id); - -------------------------------------------------------- --- DCTypeRegistry table -------------------------------------------------------- -CREATE TABLE DCTypeRegistry -( - 
dc_type_id INTEGER PRIMARY KEY, - element VARCHAR2(64), - qualifier VARCHAR2(64), - scope_note VARCHAR2(2000), - UNIQUE(element, qualifier) -); - -------------------------------------------------------- --- DCValue table -------------------------------------------------------- -CREATE TABLE DCValue -( - dc_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - dc_type_id INTEGER REFERENCES DCTypeRegistry(dc_type_id), - text_value VARCHAR2(2000), - text_lang VARCHAR2(24), - place INTEGER, - source_id INTEGER -); - --- An index for item_id - almost all access is based on --- instantiating the item object, which grabs all dcvalues --- related to that item -CREATE INDEX dcvalue_item_idx on DCValue(item_id); - -------------------------------------------------------- --- Community table -------------------------------------------------------- -CREATE TABLE Community -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128) UNIQUE, - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000) -); - -------------------------------------------------------- --- Collection table -------------------------------------------------------- -CREATE TABLE Collection -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license VARCHAR2(2000), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -------------------------------------------------------- --- 
Community2Community table -------------------------------------------------------- -CREATE TABLE Community2Community -( - id INTEGER PRIMARY KEY, - parent_comm_id INTEGER REFERENCES Community(community_id), - child_comm_id INTEGER REFERENCES Community(community_id) -); - -------------------------------------------------------- --- Community2Collection table -------------------------------------------------------- -CREATE TABLE Community2Collection -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - -------------------------------------------------------- --- Collection2Item table -------------------------------------------------------- -CREATE TABLE Collection2Item -( - id INTEGER PRIMARY KEY, - collection_id INTEGER REFERENCES Collection(collection_id), - item_id INTEGER REFERENCES Item(item_id) -); - --- index by collection_id -CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id); - -------------------------------------------------------- --- ResourcePolicy table -------------------------------------------------------- -CREATE TABLE ResourcePolicy -( - policy_id INTEGER PRIMARY KEY, - resource_type_id INTEGER, - resource_id INTEGER, - action_id INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - start_date DATE, - end_date DATE -); - --- index by resource_type,resource_id - all queries by --- authorization manager are select type=x, id=y, action=z -CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id); - -------------------------------------------------------- --- EPersonGroup2EPerson table -------------------------------------------------------- -CREATE TABLE EPersonGroup2EPerson -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - eperson_id INTEGER REFERENCES 
EPerson(eperson_id) -); - --- Index by group ID (used heavily by AuthorizeManager) -CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id); - - -------------------------------------------------------- --- Handle table -------------------------------------------------------- -CREATE TABLE Handle -( - handle_id INTEGER PRIMARY KEY, - handle VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER -); - -------------------------------------------------------- --- WorkspaceItem table -------------------------------------------------------- -CREATE TABLE WorkspaceItem -( - workspace_item_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), -- boolean - published_before NUMBER(1), - multiple_files NUMBER(1), - -- How for the user has got in the submit process - stage_reached INTEGER -); - -------------------------------------------------------- --- WorkflowItem table -------------------------------------------------------- -CREATE TABLE WorkflowItem -( - workflow_id INTEGER PRIMARY KEY, - item_id INTEGER UNIQUE REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - state INTEGER, - owner INTEGER REFERENCES EPerson(eperson_id), - - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI - -); - -------------------------------------------------------- --- TasklistItem table -------------------------------------------------------- -CREATE TABLE TasklistItem -( - tasklist_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - workflow_id INTEGER REFERENCES WorkflowItem(workflow_id) -); - - 
-------------------------------------------------------- --- RegistrationData table -------------------------------------------------------- -CREATE TABLE RegistrationData -( - registrationdata_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - token VARCHAR2(48), - expires TIMESTAMP -); - - -------------------------------------------------------- --- Subscription table -------------------------------------------------------- -CREATE TABLE Subscription -( - subscription_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - - -------------------------------------------------------- --- History table -------------------------------------------------------- -CREATE TABLE History -( - history_id INTEGER PRIMARY KEY, - -- When it was stored - creation_date TIMESTAMP, - -- A checksum to keep INTEGERizations from being stored more than once - checksum VARCHAR2(32) UNIQUE -); - -------------------------------------------------------- --- HistoryState table -------------------------------------------------------- -CREATE TABLE HistoryState -( - history_state_id INTEGER PRIMARY KEY, - object_id VARCHAR2(64) -); - ------------------------------------------------------------- --- Browse subsystem tables and views ------------------------------------------------------------- - -------------------------------------------------------- --- Communities2Item table -------------------------------------------------------- -CREATE TABLE Communities2Item -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - item_id INTEGER REFERENCES Item(item_id) -); - -------------------------------------------------------- --- Community2Item view ------------------------------------------------------- -CREATE VIEW Community2Item as -SELECT Community2Collection.community_id, Collection2Item.item_id -FROM Community2Collection, Collection2Item -WHERE 
Collection2Item.collection_id = Community2Collection.collection_id -; - -------------------------------------------------------- --- ItemsByAuthor table -------------------------------------------------------- -CREATE TABLE ItemsByAuthor -( - items_by_author_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - author VARCHAR2(2000), - sort_author VARCHAR2(2000) -); - --- index by sort_author, of course! -CREATE INDEX sort_author_idx on ItemsByAuthor(sort_author); - -------------------------------------------------------- --- CollectionItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CollectionItemsByAuthor as -SELECT Collection2Item.collection_id, ItemsByAuthor.* -FROM ItemsByAuthor, Collection2Item -WHERE ItemsByAuthor.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CommunityItemsByAuthor as -SELECT Communities2Item.community_id, ItemsByAuthor.* -FROM ItemsByAuthor, Communities2Item -WHERE ItemsByAuthor.item_id = Communities2Item.item_id -; - ----------------------------------------- --- ItemsByTitle table ----------------------------------------- -CREATE TABLE ItemsByTitle -( - items_by_title_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - title VARCHAR2(2000), - sort_title VARCHAR2(2000) -); - --- index by the sort_title -CREATE INDEX sort_title_idx on ItemsByTitle(sort_title); - - -------------------------------------------------------- --- CollectionItemsByTitle view -------------------------------------------------------- -CREATE VIEW CollectionItemsByTitle as -SELECT Collection2Item.collection_id, ItemsByTitle.* -FROM ItemsByTitle, Collection2Item -WHERE ItemsByTitle.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByTitle view 
-------------------------------------------------------- -CREATE VIEW CommunityItemsByTitle as -SELECT Communities2Item.community_id, ItemsByTitle.* -FROM ItemsByTitle, Communities2Item -WHERE ItemsByTitle.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDate table -------------------------------------------------------- -CREATE TABLE ItemsByDate -( - items_by_date_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_issued VARCHAR2(2000) -); - --- sort by date -CREATE INDEX date_issued_idx on ItemsByDate(date_issued); - -------------------------------------------------------- --- CollectionItemsByDate view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDate as -SELECT Collection2Item.collection_id, ItemsByDate.* -FROM ItemsByDate, Collection2Item -WHERE ItemsByDate.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDate view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDate as -SELECT Communities2Item.community_id, ItemsByDate.* -FROM ItemsByDate, Communities2Item -WHERE ItemsByDate.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDateAccessioned table -------------------------------------------------------- -CREATE TABLE ItemsByDateAccessioned -( - items_by_date_accessioned_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_accessioned VARCHAR2(2000) -); - -------------------------------------------------------- --- CollectionItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDateAccession as -SELECT Collection2Item.collection_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Collection2Item -WHERE ItemsByDateAccessioned.item_id = Collection2Item.item_id -; - 
-------------------------------------------------------- --- CommunityItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDateAccession as -SELECT Communities2Item.community_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Communities2Item -WHERE ItemsByDateAccessioned.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql deleted file mode 100644 index 37d7e115eb53..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql +++ /dev/null @@ -1,57 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE epersongroup2workspaceitem_seq; - -------------------------------------------------------------------------------- --- create the new EPersonGroup2WorkspaceItem table -------------------------------------------------------------------------------- - -CREATE TABLE EPersonGroup2WorkspaceItem -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - workspace_item_id INTEGER REFERENCES WorkspaceItem(workspace_item_id) -); - -------------------------------------------------------------------------------- --- modification to collection table to support being able to change the --- submitter and collection admin group names -------------------------------------------------------------------------------- -ALTER TABLE collection ADD submitter INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE collection ADD admin INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE eperson ADD netid VARCHAR2(64) UNIQUE; - -------------------------------------------------------------------------------- --- Additional indices for performance -------------------------------------------------------------------------------- - --- index by resource id and resource type id -CREATE INDEX handle_resource_id_type_idx ON handle(resource_id, resource_type_id); - --- Indexing browse tables update/re-index performance -CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id ); -CREATE INDEX ItemsByAuthor_item_id_idx ON ItemsByAuthor(item_id); -CREATE INDEX ItemsByTitle_item_id_idx ON ItemsByTitle(item_id); -CREATE INDEX ItemsByDate_item_id_idx ON ItemsByDate(item_id); -CREATE INDEX ItemsByDateAcc_item_id_idx ON ItemsByDateAccessioned(item_id); - --- Improve mapping tables -CREATE INDEX Com2Coll_community_id_idx ON Community2Collection(community_id); -CREATE INDEX Com2Coll_collection_id_idx ON 
Community2Collection(collection_id); -CREATE INDEX Coll2Item_item_id_idx ON Collection2Item( item_id ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql deleted file mode 100644 index a713ced8bbb2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql +++ /dev/null @@ -1,133 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ---------------------------------------- --- Update MetadataValue to include CLOB ---------------------------------------- - -CREATE TABLE MetadataValueTemp -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value CLOB, - text_lang VARCHAR(64), - place INTEGER -); - -INSERT INTO MetadataValueTemp -SELECT * FROM MetadataValue; - -DROP VIEW dcvalue; -DROP TABLE MetadataValue; -ALTER TABLE MetadataValueTemp RENAME TO MetadataValue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); - ------------------------------------- --- Update Community to include CLOBs ------------------------------------- - -CREATE TABLE CommunityTemp -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text CLOB, - side_bar_text VARCHAR2(2000) -); - -INSERT INTO CommunityTemp -SELECT * FROM Community; - -DROP TABLE Community CASCADE CONSTRAINTS; -ALTER TABLE CommunityTemp RENAME TO Community; - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_parent -FOREIGN KEY (parent_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_child -FOREIGN KEY (child_comm_id) -REFERENCES Community 
(community_id); - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -ALTER TABLE Communities2Item ADD CONSTRAINT fk_c2i_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -------------------------------------- --- Update Collection to include CLOBs -------------------------------------- - -CREATE TABLE CollectionTemp -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license CLOB, - copyright_text CLOB, - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ), - admin INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -INSERT INTO CollectionTemp -SELECT * FROM Collection; - -DROP TABLE Collection CASCADE CONSTRAINTS; -ALTER TABLE CollectionTemp RENAME TO Collection; - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Collection2Item ADD CONSTRAINT fk_c2i_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkspaceItem ADD CONSTRAINT fk_wsi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkflowItem ADD CONSTRAINT fk_wfi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Subscription ADD CONSTRAINT fk_subs_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql deleted file mode 100644 index 54cf10067b91..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ /dev/null @@ -1,371 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------- --- Sequences for Group within Group feature -------------------------------------------------------------------------------- -CREATE SEQUENCE group2group_seq; -CREATE SEQUENCE group2groupcache_seq; - ------------------------------------------------------- --- Group2Group table, records group membership in other groups ------------------------------------------------------- -CREATE TABLE Group2Group -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - ------------------------------------------------------- --- Group2GroupCache table, is the 'unwound' hierarchy in --- Group2Group. It explicitly names every parent child --- relationship, even with nested groups. 
For example, --- If Group2Group lists B is a child of A and C is a child of B, --- this table will have entries for parent(A,B), and parent(B,C) --- AND parent(A,C) so that all of the child groups of A can be --- looked up in a single simple query ------------------------------------------------------- -CREATE TABLE Group2GroupCache -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - - -------------------------------------------------------- --- New Metadata Tables and Sequences -------------------------------------------------------- -CREATE SEQUENCE metadataschemaregistry_seq; -CREATE SEQUENCE metadatafieldregistry_seq; -CREATE SEQUENCE metadatavalue_seq; - --- MetadataSchemaRegistry table -CREATE TABLE MetadataSchemaRegistry -( - metadata_schema_id INTEGER PRIMARY KEY, - namespace VARCHAR(256) UNIQUE, - short_id VARCHAR(32) -); - --- MetadataFieldRegistry table -CREATE TABLE MetadataFieldRegistry -( - metadata_field_id INTEGER PRIMARY KEY, - metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id), - element VARCHAR(64), - qualifier VARCHAR(64), - scope_note VARCHAR2(2000) -); - --- MetadataValue table -CREATE TABLE MetadataValue -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value VARCHAR2(2000), - text_lang VARCHAR(24), - place INTEGER -); - --- Create the DC schema -INSERT INTO MetadataSchemaRegistry VALUES (1,'http://dublincore.org/documents/dcmi-terms/','dc'); - --- Migrate the existing DCTypes into the new metadata field registry -INSERT INTO MetadataFieldRegistry - (metadata_schema_id, metadata_field_id, element, qualifier, scope_note) - SELECT '1' AS metadata_schema_id, dc_type_id, element, - qualifier, scope_note FROM dctyperegistry; - --- Copy the DCValues into the new MetadataValue table 
-INSERT INTO MetadataValue (item_id, metadata_field_id, text_value, text_lang, place) - SELECT item_id, dc_type_id, text_value, text_lang, place FROM dcvalue; - -DROP TABLE dcvalue; -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - - --- After copying data from dctypregistry to metadataschemaregistry, we need to reset our sequences --- Update metadatafieldregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_field_id) INTO curr FROM metadatafieldregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatafieldregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatafieldregistry_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadatavalue_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_value_id) INTO curr FROM metadatavalue; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatavalue_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatavalue_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadataschemaregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_schema_id) INTO curr FROM metadataschemaregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadataschemaregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadataschemaregistry_seq START WITH ' || NVL(curr,1); -END; -/ - --- Drop the old dctyperegistry -DROP TABLE dctyperegistry; - --- create indexes for the metadata tables -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); -CREATE INDEX 
metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id); - - -------------------------------------------------------- --- Create the checksum checker tables -------------------------------------------------------- --- list of the possible results as determined --- by the system or an administrator - -CREATE TABLE checksum_results -( - result_code VARCHAR(64) PRIMARY KEY, - result_description VARCHAR2(2000) -); - - --- This table has a one-to-one relationship --- with the bitstream table. A row will be inserted --- every time a row is inserted into the bitstream table, and --- that row will be updated every time the checksum is --- re-calculated. - -CREATE TABLE most_recent_checksum -( - bitstream_id INTEGER PRIMARY KEY, - to_be_processed NUMBER(1) NOT NULL, - expected_checksum VARCHAR(64) NOT NULL, - current_checksum VARCHAR(64) NOT NULL, - last_process_start_date TIMESTAMP NOT NULL, - last_process_end_date TIMESTAMP NOT NULL, - checksum_algorithm VARCHAR(64) NOT NULL, - matched_prev_checksum NUMBER(1) NOT NULL, - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - - --- A row will be inserted into this table every --- time a checksum is re-calculated. 
- -CREATE SEQUENCE checksum_history_seq; - -CREATE TABLE checksum_history -( - check_id INTEGER PRIMARY KEY, - bitstream_id INTEGER, - process_start_date TIMESTAMP, - process_end_date TIMESTAMP, - checksum_expected VARCHAR(64), - checksum_calculated VARCHAR(64), - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - --- this will insert into the result code --- the initial results - -insert into checksum_results -values -( - 'INVALID_HISTORY', - 'Install of the cheksum checking code do not consider this history as valid' -); - -insert into checksum_results -values -( - 'BITSTREAM_NOT_FOUND', - 'The bitstream could not be found' -); - -insert into checksum_results -values -( - 'CHECKSUM_MATCH', - 'Current checksum matched previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_NO_MATCH', - 'Current checksum does not match previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_PREV_NOT_FOUND', - 'Previous checksum was not found: no comparison possible' -); - -insert into checksum_results -values -( - 'BITSTREAM_INFO_NOT_FOUND', - 'Bitstream info not found' -); - -insert into checksum_results -values -( - 'CHECKSUM_ALGORITHM_INVALID', - 'Invalid checksum algorithm' -); -insert into checksum_results -values -( - 'BITSTREAM_NOT_PROCESSED', - 'Bitstream marked to_be_processed=false' -); -insert into checksum_results -values -( - 'BITSTREAM_MARKED_DELETED', - 'Bitstream marked deleted in bitstream table' -); - --- this will insert into the most recent checksum --- on install all existing bitstreams --- setting all bitstreams already set as --- deleted to not be processed - -insert into most_recent_checksum -( - bitstream_id, - to_be_processed, - expected_checksum, - current_checksum, - last_process_start_date, - last_process_end_date, - checksum_algorithm, - matched_prev_checksum -) -select - bitstream.bitstream_id, - '1', - CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - CASE WHEN 
bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' -from bitstream; - --- Update all the deleted checksums --- to not be checked --- because they have since been --- deleted from the system - -update most_recent_checksum -set to_be_processed = 0 -where most_recent_checksum.bitstream_id in ( -select bitstream_id -from bitstream where deleted = '1' ); - --- this will insert into history table --- for the initial start --- we want to tell the users to disregard the initial --- inserts into the checksum history table - -insert into checksum_history -( - bitstream_id, - process_start_date, - process_end_date, - checksum_expected, - checksum_calculated -) -select most_recent_checksum.bitstream_id, - most_recent_checksum.last_process_end_date, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - most_recent_checksum.expected_checksum, - most_recent_checksum.expected_checksum -FROM most_recent_checksum; - --- update the history to indicate that this was --- the first time the software was installed -update checksum_history -set result = 'INVALID_HISTORY'; - - -------------------------------------------------------- --- Table and views for 'browse by subject' functionality -------------------------------------------------------- -CREATE SEQUENCE itemsbysubject_seq; - -------------------------------------------------------- --- ItemsBySubject table -------------------------------------------------------- -CREATE TABLE ItemsBySubject -( - items_by_subject_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - subject VARCHAR2(2000), - sort_subject VARCHAR2(2000) -); - --- index by sort_subject -CREATE INDEX 
sort_subject_idx on ItemsBySubject(sort_subject); - -------------------------------------------------------- --- CollectionItemsBySubject view -------------------------------------------------------- -CREATE VIEW CollectionItemsBySubject as -SELECT Collection2Item.collection_id, ItemsBySubject.* -FROM ItemsBySubject, Collection2Item -WHERE ItemsBySubject.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsBySubject view -------------------------------------------------------- -CREATE VIEW CommunityItemsBySubject as -SELECT Communities2Item.community_id, ItemsBySubject.* -FROM ItemsBySubject, Communities2Item -WHERE ItemsBySubject.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql deleted file mode 100644 index bb217bd0d18d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql +++ /dev/null @@ -1,142 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Remove NOT NULL restrictions from the checksum columns of most_recent_checksum -ALTER TABLE most_recent_checksum MODIFY expected_checksum null; -ALTER TABLE most_recent_checksum MODIFY current_checksum null; - ------------------------------------------------------- --- New Column language language in EPerson ------------------------------------------------------- - -alter table eperson ADD language VARCHAR2(64); -update eperson set language = 'en'; - --- totally unused column -alter table bundle drop column mets_bitstream_id; - -------------------------------------------------------------------------------- --- Necessary for Configurable Submission functionality: --- Modification to workspaceitem table to support keeping track --- of the last page reached within a step in the Configurable Submission Process -------------------------------------------------------------------------------- -ALTER TABLE workspaceitem ADD page_reached INTEGER; - - -------------------------------------------------------------------------- --- Increase the mimetype field size to support larger types, such as the --- new Word 2007 mimetypes. 
-------------------------------------------------------------------------- -ALTER TABLE BitstreamFormatRegistry MODIFY (mimetype VARCHAR(256)); - - -------------------------------------------------------------------------- --- Tables to manage cache of item counts for communities and collections -------------------------------------------------------------------------- - -CREATE TABLE collection_item_count ( - collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id), - count INTEGER -); - -CREATE TABLE community_item_count ( - community_id INTEGER PRIMARY KEY REFERENCES community(community_id), - count INTEGER -); - ------------------------------------------------------------------- --- Remove sequences and tables of the old browse system ------------------------------------------------------------------- - -DROP SEQUENCE itemsbyauthor_seq; -DROP SEQUENCE itemsbytitle_seq; -DROP SEQUENCE itemsbydate_seq; -DROP SEQUENCE itemsbydateaccessioned_seq; -DROP SEQUENCE itemsbysubject_seq; - -DROP TABLE ItemsByAuthor CASCADE CONSTRAINTS; -DROP TABLE ItemsByTitle CASCADE CONSTRAINTS; -DROP TABLE ItemsByDate CASCADE CONSTRAINTS; -DROP TABLE ItemsByDateAccessioned CASCADE CONSTRAINTS; -DROP TABLE ItemsBySubject CASCADE CONSTRAINTS; - -DROP TABLE History CASCADE CONSTRAINTS; -DROP TABLE HistoryState CASCADE CONSTRAINTS; - ----------------------------------------------------------------- --- Add indexes for foreign key columns ----------------------------------------------------------------- - -CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id); - -CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id); - -CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id); -CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id); - --- CREATE INDEX g2gc_parent_fk_idx ON Group2Group(parent_id); --- CREATE INDEX g2gc_child_fk_idx ON Group2Group(child_id); - -CREATE INDEX item_submitter_fk_idx ON Item(submitter_id); - -CREATE INDEX 
bundle_primary_fk_idx ON Bundle(primary_bitstream_id); - -CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id); - -CREATE INDEX bundle2bits_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id); - -CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id); - -CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id); - -CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id); -CREATE INDEX collection_template_fk_idx ON Collection(template_item_id); -CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1); -CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2); -CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3); -CREATE INDEX collection_submitter_fk_idx ON Collection(submitter); -CREATE INDEX collection_admin_fk_idx ON Collection(admin); - -CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id); -CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id); - -CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id); -CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id); - -CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id); - -CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id); -CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id); - --- CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id); -CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id); -CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner); - -CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id); -CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id); - -CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id); -CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id); - -CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id); -CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id); - -CREATE 
INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id ); - -CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result ); - -CREATE INDEX ch_result_fk_idx ON checksum_history( result ); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql deleted file mode 100644 index 659ca32983cc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql +++ /dev/null @@ -1,93 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- New Column for Community Admin - Delegated Admin patch (DS-228) ------------------------------------------------------------------- -ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id ); -CREATE INDEX community_admin_fk_idx ON Community(admin); - -------------------------------------------------------------------------- --- DS-236 schema changes for Authority Control of Metadata Values -------------------------------------------------------------------------- -ALTER TABLE MetadataValue - ADD ( authority VARCHAR(100), - confidence INTEGER DEFAULT -1); - --------------------------------------------------------------------------- --- DS-295 CC License being assigned incorrect Mime Type during submission. --------------------------------------------------------------------------- -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License') - WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons'; - -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML') - WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons'; - -------------------------------------------------------------------------- --- DS-260 Cleanup of Owning collection column for template item created --- with the JSPUI after the collection creation -------------------------------------------------------------------------- -UPDATE item SET owning_collection = null WHERE item_id IN - (SELECT template_item_id FROM collection WHERE template_item_id IS NOT null); - --- Recreate restraints with a know name and deferrable option! 
--- (The previous version of these constraints is dropped by org.dspace.storage.rdbms.migration.V1_5_9__Drop_constraint_for_DSpace_1_6_schema) -ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE; -ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE; -ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE; - - ------------------------------------------------------------------- --- New tables /sequences for the harvester functionality (DS-289) ------------------------------------------------------------------- -CREATE SEQUENCE harvested_collection_seq; -CREATE SEQUENCE harvested_item_seq; - -------------------------------------------------------- --- Create the harvest settings table -------------------------------------------------------- --- Values used by the OAIHarvester to harvest a collection --- HarvestInstance is the DAO class for this table - -CREATE TABLE harvested_collection -( - collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE, - harvest_type INTEGER, - oai_source VARCHAR(256), - oai_set_id VARCHAR(256), - harvest_message VARCHAR2(512), - metadata_config_id VARCHAR(256), - harvest_status INTEGER, - harvest_start_time TIMESTAMP, - last_harvested TIMESTAMP, - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id); - - -CREATE TABLE harvested_item -( - item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE, - last_harvested TIMESTAMP, - oai_id VARCHAR(64), - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql deleted file mode 100644 index f4b2737fb3a8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- Remove unused / obsolete sequence 'dctyperegistry_seq' (DS-729) ------------------------------------------------------------------- -DROP SEQUENCE dctyperegistry_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql deleted file mode 100644 index f96cddbe7fd4..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN 
THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- New column for bitstream order DS-749 -- -------------------------------------------- -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; - ---Place the sequence id's in the order -UPDATE bundle2bitstream SET bitstream_order=(SELECT sequence_id FROM bitstream WHERE bitstream.bitstream_id=bundle2bitstream.bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql deleted file mode 100644 index 472dc7dc5279..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -ALTER TABLE resourcepolicy - ADD ( - rpname VARCHAR2(30), - rptype VARCHAR2(30), - rpdescription VARCHAR2(100) - ); - - -ALTER TABLE item ADD discoverable NUMBER(1); - -CREATE TABLE versionhistory -( - versionhistory_id INTEGER NOT NULL PRIMARY KEY -); - -CREATE TABLE versionitem -( - versionitem_id INTEGER NOT NULL PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - version_number INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - version_date TIMESTAMP, - version_summary VARCHAR2(255), - versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id) -); - -CREATE SEQUENCE versionitem_seq; -CREATE SEQUENCE versionhistory_seq; - - -------------------------------------------- --- New columns and longer hash for salted password hashing DS-861 -- -------------------------------------------- -ALTER TABLE EPerson modify( password VARCHAR(128)); -ALTER TABLE EPerson ADD salt VARCHAR(32); -ALTER TABLE EPerson ADD digest_algorithm VARCHAR(16); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql deleted file mode 100644 index 8102376906a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql +++ /dev/null @@ -1,88 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. 
IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- Ensure that discoverable has a sensible default -------------------------------------------- -update item set discoverable=1 WHERE discoverable IS NULL; - -------------------------------------------- --- Add support for DOIs (table and seq.) -- -------------------------------------------- - -CREATE TABLE Doi -( - doi_id INTEGER PRIMARY KEY, - doi VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER, - status INTEGER -); - -CREATE SEQUENCE doi_seq; - --- index by resource id and resource type id -CREATE INDEX doi_resource_id_type_idx ON doi(resource_id, resource_type_id); - -------------------------------------------- --- Table of running web applications for 'dspace version' -- -------------------------------------------- - -CREATE TABLE Webapp -( - webapp_id INTEGER NOT NULL PRIMARY KEY, - AppName VARCHAR2(32), - URL VARCHAR2(1000), - Started TIMESTAMP, - isUI NUMBER(1) -); - -CREATE SEQUENCE webapp_seq; - -------------------------------------------------------- --- DS-824 RequestItem table -------------------------------------------------------- - -CREATE TABLE requestitem -( - requestitem_id INTEGER NOT NULL, - token varchar(48), - item_id INTEGER, - bitstream_id INTEGER, - allfiles NUMBER(1), - request_email VARCHAR2(64), - request_name VARCHAR2(64), - request_date TIMESTAMP, - accept_request NUMBER(1), - decision_date TIMESTAMP, - expires TIMESTAMP, - CONSTRAINT requestitem_pkey PRIMARY KEY (requestitem_id), - CONSTRAINT requestitem_token_key UNIQUE (token) -); - -CREATE SEQUENCE requestitem_seq; - -------------------------------------------------------- --- DS-1655 Disable "Initial Questions" page in Submission UI by default -------------------------------------------------------- -update workspaceitem set 
multiple_titles=1, published_before=1, multiple_files=1; -update workflowitem set multiple_titles=1, published_before=1, multiple_files=1; - -------------------------------------------------------- --- DS-1811 Removing a collection fails if non-Solr DAO has been used before for item count -------------------------------------------------------- -delete from collection_item_count; -delete from community_item_count; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql deleted file mode 100644 index 6d75905ec980..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql +++ /dev/null @@ -1,64 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Special case of migration, we need to the EPerson schema in order to get our metadata for all queries to work --- but we cannot a DB connection until our database is up to date, so we need to create our registries manually in sql - -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/eperson' as namespace, 'eperson' as short_id FROM dual - WHERE NOT EXISTS (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry WHERE namespace = 'http://dspace.org/eperson' AND short_id = 'eperson'); - - --- Insert eperson.firstname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'firstname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'firstname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.lastname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'lastname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'lastname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.phone -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'phone' FROM dual - WHERE NOT EXISTS - (SELECT 
metadata_field_id,element FROM metadatafieldregistry WHERE element = 'phone' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.language -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'language' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'language' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert into dc.provenance -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'provenance' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'provenance' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); - --- Insert into dc.rights.license -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'rights', 'license' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry WHERE element = 'rights' AND qualifier='license' AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql deleted file mode 100644 index c86cfe31223e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1945 RequestItem Helpdesk, store request message ------------------------------------------------------- -ALTER TABLE requestitem ADD request_message VARCHAR2(2000); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql deleted file mode 100644 index 8f0cd0d5e1d7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ /dev/null @@ -1,333 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING 
WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1582 Metadata on all DSpace Objects --- NOTE: This script also has a complimentary Flyway Java Migration --- which drops the "item_id" constraint on metadatavalue --- org.dspace.storage.rdbms.migration.V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint ------------------------------------------------------- -alter table metadatavalue rename column item_id to resource_id; - -alter table metadatavalue MODIFY(resource_id not null); -alter table metadatavalue add resource_type_id integer; -UPDATE metadatavalue SET resource_type_id = 2; -alter table metadatavalue MODIFY(resource_type_id not null); - - - --- --------- --- community --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 
'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM community where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM community where not name is null; - -alter table community drop (introductory_text, short_description, side_bar_text, 
copyright_text, name); - - --- ---------- --- collection --- ---------- - - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT 
-metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM collection where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, -provenance_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not provenance_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and 
element = 'rights' and qualifier = 'license') AS metadata_field_id, -license AS text_value, -null AS text_lang, -0 AS place -FROM collection where not license is null; - -alter table collection drop (introductory_text, short_description, copyright_text, side_bar_text, name, license, provenance_description); - - --- --------- --- bundle --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bundle_id AS resource_id, -1 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bundle where not name is null; - -alter table bundle drop column name; - - - --- --------- --- bitstream --- --------- - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS 
metadata_field_id, -description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, -user_format_description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not user_format_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, -source AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not source is null; - -alter table bitstream drop (name, description, user_format_description, source); - - --- --------- --- epersongroup --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_group_id AS resource_id, -6 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM epersongroup where not name is null; - -alter table 
epersongroup drop column name; - - - --- --------- --- eperson --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, -firstname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not firstname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, -lastname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not lastname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, -phone AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not phone is null; - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS 
resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, -language AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not language is null; - -alter table eperson drop (firstname, lastname, phone, language); - --- --------- --- dcvalue view --- --------- - -drop view dcvalue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.resource_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1 AND MetadataValue.resource_type_id = 2; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql deleted file mode 100644 index 2e09b807de3b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id 
- LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql deleted file mode 100644 index 9f9836faf471..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3563 Missing database index on metadatavalue.resource_type_id ------------------------------------------------------- --- Create an index on the metadata value resource_type_id column so that it can be searched efficiently. 
-declare - index_not_exists EXCEPTION; - PRAGMA EXCEPTION_INIT(index_not_exists, -1418); -begin - - execute immediate 'DROP INDEX metadatavalue_type_id_idx'; - exception - when index_not_exists then null; -end; -/ -CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql deleted file mode 100644 index dd857e763df0..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql +++ /dev/null @@ -1,469 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -DROP VIEW community2item; - -CREATE TABLE dspaceobject -( - uuid RAW(16) NOT NULL PRIMARY KEY -); - -CREATE TABLE site -( - uuid RAW(16) NOT NULL PRIMARY KEY REFERENCES dspaceobject(uuid) -); - -ALTER TABLE eperson ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM eperson; -ALTER TABLE eperson ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE eperson MODIFY uuid NOT NULL; -ALTER TABLE eperson ADD CONSTRAINT eperson_id_unique PRIMARY KEY (uuid); -UPDATE eperson SET require_certificate = '0' WHERE require_certificate IS NULL; -UPDATE eperson SET self_registered = '0' WHERE self_registered IS NULL; - - - -UPDATE metadatavalue SET text_value='Administrator' - WHERE resource_type_id=6 AND resource_id=1; -UPDATE metadatavalue SET text_value='Anonymous' - 
WHERE resource_type_id=6 AND resource_id=0; - -ALTER TABLE epersongroup ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM epersongroup; -ALTER TABLE epersongroup ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE epersongroup MODIFY uuid NOT NULL; -ALTER TABLE epersongroup ADD CONSTRAINT epersongroup_id_unique PRIMARY KEY (uuid); - -ALTER TABLE item ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM item; -ALTER TABLE item ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE item MODIFY uuid NOT NULL; -ALTER TABLE item ADD CONSTRAINT item_id_unique PRIMARY KEY (uuid); - -ALTER TABLE community ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM community; -ALTER TABLE community ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE community MODIFY uuid NOT NULL; -ALTER TABLE community ADD CONSTRAINT community_id_unique PRIMARY KEY (uuid); - - -ALTER TABLE collection ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM collection; -ALTER TABLE collection ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE collection MODIFY uuid NOT NULL; -ALTER TABLE collection ADD CONSTRAINT collection_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bundle ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bundle; -ALTER TABLE bundle ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bundle MODIFY uuid NOT NULL; -ALTER TABLE bundle ADD CONSTRAINT bundle_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bitstream ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bitstream; -ALTER TABLE bitstream ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bitstream MODIFY uuid NOT NULL; -ALTER TABLE bitstream ADD CONSTRAINT bitstream_id_unique PRIMARY KEY (uuid); -UPDATE bitstream SET sequence_id = -1 WHERE sequence_id IS NULL; -UPDATE 
bitstream SET size_bytes = -1 WHERE size_bytes IS NULL; -UPDATE bitstream SET deleted = '0' WHERE deleted IS NULL; -UPDATE bitstream SET store_number = -1 WHERE store_number IS NULL; - --- Migrate EPersonGroup2EPerson table -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE EPersonGroup2EPerson ADD eperson_group_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE EPersonGroup2EPerson ADD eperson_id RAW(16) REFERENCES Eperson(uuid); -CREATE INDEX EpersonGroup2Eperson_group on EpersonGroup2Eperson(eperson_group_id); -CREATE INDEX EpersonGroup2Eperson_person on EpersonGroup2Eperson(eperson_id); -UPDATE EPersonGroup2EPerson SET eperson_group_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE EPersonGroup2EPerson.eperson_group_legacy_id = EPersonGroup.eperson_group_id); -UPDATE EPersonGroup2EPerson SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE EPersonGroup2EPerson.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_group_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_legacy_id; -ALTER TABLE epersongroup2eperson DROP COLUMN id; -ALTER TABLE EPersonGroup2EPerson add CONSTRAINT EPersonGroup2EPerson_unique primary key (eperson_group_id,eperson_id); - --- Migrate GROUP2GROUP table -ALTER TABLE Group2Group RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2Group RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2Group ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2Group ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2Group_parent on Group2Group(parent_id); -CREATE INDEX Group2Group_child on Group2Group(child_id); -UPDATE Group2Group SET parent_id = (SELECT 
EPersonGroup.uuid FROM EpersonGroup WHERE Group2Group.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2Group SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2Group.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2Group MODIFY parent_id NOT NULL; -ALTER TABLE Group2Group MODIFY child_id NOT NULL; -ALTER TABLE Group2Group DROP COLUMN parent_legacy_id; -ALTER TABLE Group2Group DROP COLUMN child_legacy_id; -ALTER TABLE Group2Group DROP COLUMN id; -ALTER TABLE Group2Group add CONSTRAINT Group2Group_unique primary key (parent_id,child_id); - --- Migrate collection2item -ALTER TABLE Collection2Item RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Collection2Item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE Collection2Item ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE Collection2Item ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX Collecion2Item_collection on Collection2Item(collection_id); -CREATE INDEX Collecion2Item_item on Collection2Item(item_id); -UPDATE Collection2Item SET collection_id = (SELECT Collection.uuid FROM Collection WHERE Collection2Item.collection_legacy_id = Collection.collection_id); -UPDATE Collection2Item SET item_id = (SELECT Item.uuid FROM Item WHERE Collection2Item.item_legacy_id = Item.item_id); -ALTER TABLE Collection2Item MODIFY collection_id NOT NULL; -ALTER TABLE Collection2Item MODIFY item_id NOT NULL; -ALTER TABLE Collection2Item DROP COLUMN collection_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN item_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN id; --- Magic query that will delete all duplicate collection item_id references from the database (if we don't do this the primary key creation will fail) -DELETE FROM collection2item WHERE rowid NOT IN (SELECT MIN(rowid) FROM collection2item GROUP BY collection_id,item_id); -ALTER TABLE Collection2Item add CONSTRAINT collection2item_unique primary key 
(collection_id,item_id); - --- Migrate Community2Community -ALTER TABLE Community2Community RENAME COLUMN parent_comm_id to parent_legacy_id; -ALTER TABLE Community2Community RENAME COLUMN child_comm_id to child_legacy_id; -ALTER TABLE Community2Community ADD parent_comm_id RAW(16) REFERENCES Community(uuid); -ALTER TABLE Community2Community ADD child_comm_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX Community2Community_parent on Community2Community(parent_comm_id); -CREATE INDEX Community2Community_child on Community2Community(child_comm_id); -UPDATE Community2Community SET parent_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.parent_legacy_id = Community.community_id); -UPDATE Community2Community SET child_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.child_legacy_id = Community.community_id); -ALTER TABLE Community2Community MODIFY parent_comm_id NOT NULL; -ALTER TABLE Community2Community MODIFY child_comm_id NOT NULL; -ALTER TABLE Community2Community DROP COLUMN parent_legacy_id; -ALTER TABLE Community2Community DROP COLUMN child_legacy_id; -ALTER TABLE Community2Community DROP COLUMN id; -ALTER TABLE Community2Community add CONSTRAINT Community2Community_unique primary key (parent_comm_id,child_comm_id); - --- Migrate community2collection -ALTER TABLE community2collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE community2collection RENAME COLUMN community_id to community_legacy_id; -ALTER TABLE community2collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE community2collection ADD community_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX community2collection_collectio on community2collection(collection_id); -CREATE INDEX community2collection_community on community2collection(community_id); -UPDATE community2collection SET collection_id = (SELECT Collection.uuid FROM Collection WHERE community2collection.collection_legacy_id = 
Collection.collection_id); -UPDATE community2collection SET community_id = (SELECT Community.uuid FROM Community WHERE community2collection.community_legacy_id = Community.community_id); -ALTER TABLE community2collection MODIFY collection_id NOT NULL; -ALTER TABLE community2collection MODIFY community_id NOT NULL; -ALTER TABLE community2collection DROP COLUMN collection_legacy_id; -ALTER TABLE community2collection DROP COLUMN community_legacy_id; -ALTER TABLE community2collection DROP COLUMN id; -ALTER TABLE community2collection add CONSTRAINT community2collection_unique primary key (collection_id,community_id); - - --- Migrate Group2GroupCache table -ALTER TABLE Group2GroupCache RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2GroupCache RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2GroupCache ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2GroupCache ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2GroupCache_parent on Group2GroupCache(parent_id); -CREATE INDEX Group2GroupCache_child on Group2GroupCache(child_id); -UPDATE Group2GroupCache SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2GroupCache SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2GroupCache MODIFY parent_id NOT NULL; -ALTER TABLE Group2GroupCache MODIFY child_id NOT NULL; -ALTER TABLE Group2GroupCache DROP COLUMN parent_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN child_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN id; -ALTER TABLE Group2GroupCache add CONSTRAINT Group2GroupCache_unique primary key (parent_id,child_id); - --- Migrate Item2Bundle -ALTER TABLE item2bundle RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE item2bundle RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE item2bundle ADD 
bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE item2bundle ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX item2bundle_bundle on item2bundle(bundle_id); -CREATE INDEX item2bundle_item on item2bundle(item_id); -UPDATE item2bundle SET bundle_id = (SELECT Bundle.uuid FROM Bundle WHERE item2bundle.bundle_legacy_id = Bundle.bundle_id); -UPDATE item2bundle SET item_id = (SELECT Item.uuid FROM Item WHERE item2bundle.item_legacy_id = Item.item_id); -ALTER TABLE item2bundle MODIFY bundle_id NOT NULL; -ALTER TABLE item2bundle MODIFY item_id NOT NULL; -ALTER TABLE item2bundle DROP COLUMN bundle_legacy_id; -ALTER TABLE item2bundle DROP COLUMN item_legacy_id; -ALTER TABLE item2bundle DROP COLUMN id; -ALTER TABLE item2bundle add CONSTRAINT item2bundle_unique primary key (bundle_id,item_id); - ---Migrate Bundle2Bitsteam -ALTER TABLE bundle2bitstream RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE bundle2bitstream ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE bundle2bitstream ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle2bitstream_bundle on bundle2bitstream(bundle_id); -CREATE INDEX bundle2bitstream_bitstream on bundle2bitstream(bitstream_id); -UPDATE bundle2bitstream SET bundle_id = (SELECT bundle.uuid FROM bundle WHERE bundle2bitstream.bundle_legacy_id = bundle.bundle_id); -UPDATE bundle2bitstream SET bitstream_id = (SELECT bitstream.uuid FROM bitstream WHERE bundle2bitstream.bitstream_legacy_id = bitstream.bitstream_id); -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_order to bitstream_order_legacy; -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; -MERGE INTO bundle2bitstream dst -USING ( SELECT ROWID AS r_id - , ROW_NUMBER () OVER ( PARTITION BY bundle_id - ORDER BY bitstream_order_legacy, bitstream_id - ) AS new_order - FROM bundle2bitstream - ) src -ON (dst.ROWID = src.r_id) -WHEN MATCHED THEN UPDATE -SET 
dst.bitstream_order = (src.new_order-1) -; -ALTER TABLE bundle2bitstream MODIFY bundle_id NOT NULL; -ALTER TABLE bundle2bitstream MODIFY bitstream_id NOT NULL; -ALTER TABLE bundle2bitstream DROP COLUMN bundle_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN bitstream_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN id; -ALTER TABLE bundle2bitstream add CONSTRAINT bundle2bitstream_unique primary key (bitstream_id,bundle_id,bitstream_order); - - --- Migrate item -ALTER TABLE item RENAME COLUMN submitter_id to submitter_id_legacy_id; -ALTER TABLE item ADD submitter_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX item_submitter on item(submitter_id); -UPDATE item SET submitter_id = (SELECT eperson.uuid FROM eperson WHERE item.submitter_id_legacy_id = eperson.eperson_id); -ALTER TABLE item DROP COLUMN submitter_id_legacy_id; - -ALTER TABLE item RENAME COLUMN owning_collection to owning_collection_legacy; -ALTER TABLE item ADD owning_collection RAW(16) REFERENCES Collection(uuid); -CREATE INDEX item_collection on item(owning_collection); -UPDATE item SET owning_collection = (SELECT Collection.uuid FROM Collection WHERE item.owning_collection_legacy = collection.collection_id); -ALTER TABLE item DROP COLUMN owning_collection_legacy; - -UPDATE item SET in_archive = '0' WHERE in_archive IS NULL; -UPDATE item SET discoverable = '0' WHERE discoverable IS NULL; -UPDATE item SET withdrawn = '0' WHERE withdrawn IS NULL; - --- Migrate bundle -ALTER TABLE bundle RENAME COLUMN primary_bitstream_id to primary_bitstream_legacy_id; -ALTER TABLE bundle ADD primary_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle_primary on bundle(primary_bitstream_id); -UPDATE bundle SET primary_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE bundle.primary_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE bundle DROP COLUMN primary_bitstream_legacy_id; - - --- Migrate community references -ALTER TABLE Community RENAME COLUMN admin to admin_legacy; 
-ALTER TABLE Community ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Community_admin on Community(admin); -UPDATE Community SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Community.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Community DROP COLUMN admin_legacy; - -ALTER TABLE Community RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Community ADD logo_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX Community_bitstream on Community(logo_bitstream_id); -UPDATE Community SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Community.logo_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE Community DROP COLUMN logo_bitstream_legacy_id; - - ---Migrate Collection references -ALTER TABLE Collection RENAME COLUMN workflow_step_1 to workflow_step_1_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_2 to workflow_step_2_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_3 to workflow_step_3_legacy; -ALTER TABLE Collection RENAME COLUMN submitter to submitter_legacy; -ALTER TABLE Collection RENAME COLUMN template_item_id to template_item_legacy_id; -ALTER TABLE Collection RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Collection RENAME COLUMN admin to admin_legacy; -ALTER TABLE Collection ADD workflow_step_1 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_2 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_3 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD submitter RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD template_item_id RAW(16); -ALTER TABLE Collection ADD logo_bitstream_id RAW(16); -ALTER TABLE Collection ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Collection_workflow1 on Collection(workflow_step_1); -CREATE INDEX Collection_workflow2 on Collection(workflow_step_2); -CREATE INDEX 
Collection_workflow3 on Collection(workflow_step_3); -CREATE INDEX Collection_submitter on Collection(submitter); -CREATE INDEX Collection_template on Collection(template_item_id); -CREATE INDEX Collection_bitstream on Collection(logo_bitstream_id); -UPDATE Collection SET workflow_step_1 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_1_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_2 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_2_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_3 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_3_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET submitter = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.submitter_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET template_item_id = (SELECT Item.uuid FROM Item WHERE Collection.template_item_legacy_id = Item.item_id); -UPDATE Collection SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Collection.logo_bitstream_legacy_id = Bitstream.bitstream_id); -UPDATE Collection SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Collection DROP COLUMN workflow_step_1_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_2_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_3_legacy; -ALTER TABLE Collection DROP COLUMN submitter_legacy; -ALTER TABLE Collection DROP COLUMN template_item_legacy_id; -ALTER TABLE Collection DROP COLUMN logo_bitstream_legacy_id; -ALTER TABLE Collection DROP COLUMN admin_legacy; - - --- Migrate resource policy references -ALTER TABLE ResourcePolicy RENAME COLUMN eperson_id to eperson_id_legacy_id; -ALTER TABLE ResourcePolicy ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX resourcepolicy_person on resourcepolicy(eperson_id); -UPDATE ResourcePolicy SET 
eperson_id = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.eperson_id_legacy_id = eperson.eperson_id); -ALTER TABLE ResourcePolicy DROP COLUMN eperson_id_legacy_id; - -ALTER TABLE ResourcePolicy RENAME COLUMN epersongroup_id to epersongroup_id_legacy_id; -ALTER TABLE ResourcePolicy ADD epersongroup_id RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX resourcepolicy_group on resourcepolicy(epersongroup_id); -UPDATE ResourcePolicy SET epersongroup_id = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.epersongroup_id_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE ResourcePolicy DROP COLUMN epersongroup_id_legacy_id; - -ALTER TABLE ResourcePolicy ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -UPDATE ResourcePolicy SET dspace_object = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.resource_id = eperson.eperson_id AND ResourcePolicy.resource_type_id = 7) WHERE ResourcePolicy.resource_type_id = 7; -UPDATE ResourcePolicy SET dspace_object = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.resource_id = epersongroup.eperson_group_id AND ResourcePolicy.resource_type_id = 6) WHERE ResourcePolicy.resource_type_id = 6; -UPDATE ResourcePolicy SET dspace_object = (SELECT community.uuid FROM community WHERE ResourcePolicy.resource_id = community.community_id AND ResourcePolicy.resource_type_id = 4) WHERE ResourcePolicy.resource_type_id = 4; -UPDATE ResourcePolicy SET dspace_object = (SELECT collection.uuid FROM collection WHERE ResourcePolicy.resource_id = collection.collection_id AND ResourcePolicy.resource_type_id = 3) WHERE ResourcePolicy.resource_type_id = 3; -UPDATE ResourcePolicy SET dspace_object = (SELECT item.uuid FROM item WHERE ResourcePolicy.resource_id = item.item_id AND ResourcePolicy.resource_type_id = 2) WHERE ResourcePolicy.resource_type_id = 2; -UPDATE ResourcePolicy SET dspace_object = (SELECT bundle.uuid FROM bundle 
WHERE ResourcePolicy.resource_id = bundle.bundle_id AND ResourcePolicy.resource_type_id = 1) WHERE ResourcePolicy.resource_type_id = 1; -UPDATE ResourcePolicy SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE ResourcePolicy.resource_id = bitstream.bitstream_id AND ResourcePolicy.resource_type_id = 0) WHERE ResourcePolicy.resource_type_id = 0; -UPDATE resourcepolicy SET resource_type_id = -1 WHERE resource_type_id IS NULL; -UPDATE resourcepolicy SET action_id = -1 WHERE action_id IS NULL; - - --- Migrate Subscription -ALTER TABLE Subscription RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE Subscription ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX Subscription_person on Subscription(eperson_id); -UPDATE Subscription SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE Subscription.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE Subscription DROP COLUMN eperson_legacy_id; - -ALTER TABLE Subscription RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Subscription ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX Subscription_collection on Subscription(collection_id); -UPDATE Subscription SET collection_id = (SELECT collection.uuid FROM collection WHERE Subscription.collection_legacy_id = collection.collection_id); -ALTER TABLE Subscription DROP COLUMN collection_legacy_id; - - --- Migrate versionitem -ALTER TABLE versionitem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE versionitem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX versionitem_person on versionitem(eperson_id); -UPDATE versionitem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE versionitem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE versionitem DROP COLUMN eperson_legacy_id; - -ALTER TABLE versionitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE versionitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX versionitem_item on versionitem(item_id); -UPDATE 
versionitem SET item_id = (SELECT item.uuid FROM item WHERE versionitem.item_legacy_id = item.item_id); -ALTER TABLE versionitem DROP COLUMN item_legacy_id; -UPDATE versionitem SET version_number = -1 WHERE version_number IS NULL; - --- Migrate handle table -ALTER TABLE handle RENAME COLUMN resource_id to resource_legacy_id; -ALTER TABLE handle ADD resource_id RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX handle_object on handle(resource_id); -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4); -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3); -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2); - --- Migrate metadata value table -DROP VIEW dcvalue; - -ALTER TABLE metadatavalue ADD dspace_object_id RAW(16) REFERENCES dspaceobject(uuid); --- CREATE INDEX metadatavalue_field on metadatavalue(metadata_field_id); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); -UPDATE metadatavalue SET dspace_object_id = (SELECT eperson.uuid FROM eperson WHERE metadatavalue.resource_id = eperson.eperson_id AND metadatavalue.resource_type_id = 7) WHERE metadatavalue.resource_type_id= 7; -UPDATE metadatavalue SET dspace_object_id = (SELECT epersongroup.uuid FROM epersongroup WHERE metadatavalue.resource_id = epersongroup.eperson_group_id AND metadatavalue.resource_type_id = 6) WHERE metadatavalue.resource_type_id= 6; -UPDATE metadatavalue SET dspace_object_id = (SELECT community.uuid FROM community WHERE metadatavalue.resource_id = community.community_id AND metadatavalue.resource_type_id = 4) WHERE metadatavalue.resource_type_id= 4; -UPDATE metadatavalue 
SET dspace_object_id = (SELECT collection.uuid FROM collection WHERE metadatavalue.resource_id = collection.collection_id AND metadatavalue.resource_type_id = 3) WHERE metadatavalue.resource_type_id= 3; -UPDATE metadatavalue SET dspace_object_id = (SELECT item.uuid FROM item WHERE metadatavalue.resource_id = item.item_id AND metadatavalue.resource_type_id = 2) WHERE metadatavalue.resource_type_id= 2; -UPDATE metadatavalue SET dspace_object_id = (SELECT bundle.uuid FROM bundle WHERE metadatavalue.resource_id = bundle.bundle_id AND metadatavalue.resource_type_id = 1) WHERE metadatavalue.resource_type_id= 1; -UPDATE metadatavalue SET dspace_object_id = (SELECT bitstream.uuid FROM bitstream WHERE metadatavalue.resource_id = bitstream.bitstream_id AND metadatavalue.resource_type_id = 0) WHERE metadatavalue.resource_type_id= 0; -DROP INDEX metadatavalue_item_idx; -DROP INDEX metadatavalue_item_idx2; -ALTER TABLE metadatavalue DROP COLUMN resource_id; -ALTER TABLE metadatavalue DROP COLUMN resource_type_id; -UPDATE MetadataValue SET confidence = -1 WHERE confidence IS NULL; -UPDATE metadatavalue SET place = -1 WHERE place IS NULL; - --- Alter harvested item -ALTER TABLE harvested_item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE harvested_item ADD item_id RAW(16) REFERENCES item(uuid); -CREATE INDEX harvested_item_item on harvested_item(item_id); -UPDATE harvested_item SET item_id = (SELECT item.uuid FROM item WHERE harvested_item.item_legacy_id = item.item_id); -ALTER TABLE harvested_item DROP COLUMN item_legacy_id; - --- Alter harvested collection -ALTER TABLE harvested_collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE harvested_collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX harvested_collection_collectio on harvested_collection(collection_id); -UPDATE harvested_collection SET collection_id = (SELECT collection.uuid FROM collection WHERE harvested_collection.collection_legacy_id = 
collection.collection_id); -ALTER TABLE harvested_collection DROP COLUMN collection_legacy_id; - -UPDATE harvested_collection SET harvest_type = -1 WHERE harvest_type IS NULL; -UPDATE harvested_collection SET harvest_status = -1 WHERE harvest_status IS NULL; - - ---Alter workspaceitem -ALTER TABLE workspaceitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workspaceitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX workspaceitem_item on workspaceitem(item_id); -UPDATE workspaceitem SET item_id = (SELECT item.uuid FROM item WHERE workspaceitem.item_legacy_id = item.item_id); -ALTER TABLE workspaceitem DROP COLUMN item_legacy_id; - -ALTER TABLE workspaceitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workspaceitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX workspaceitem_coll on workspaceitem(collection_id); -UPDATE workspaceitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workspaceitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workspaceitem DROP COLUMN collection_legacy_id; - -UPDATE workspaceitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workspaceitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workspaceitem SET multiple_files = '0' WHERE multiple_files IS NULL; -UPDATE workspaceitem SET stage_reached = -1 WHERE stage_reached IS NULL; -UPDATE workspaceitem SET page_reached = -1 WHERE page_reached IS NULL; - ---Alter epersongroup2workspaceitem -ALTER TABLE epersongroup2workspaceitem RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE epersongroup2workspaceitem ADD eperson_group_id RAW(16) REFERENCES epersongroup(uuid); -CREATE INDEX epersongroup2workspaceitem_gro on epersongroup2workspaceitem(eperson_group_id); -UPDATE epersongroup2workspaceitem SET eperson_group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE epersongroup2workspaceitem.eperson_group_legacy_id = 
epersongroup.eperson_group_id); -ALTER TABLE epersongroup2workspaceitem DROP COLUMN eperson_group_legacy_id; - -ALTER TABLE epersongroup2workspaceitem DROP COLUMN id; -ALTER TABLE epersongroup2workspaceitem MODIFY workspace_item_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem MODIFY eperson_group_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem add CONSTRAINT epersongroup2wsitem_unqiue primary key (workspace_item_id,eperson_group_id); - ---Alter most_recent_checksum -ALTER TABLE most_recent_checksum RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE most_recent_checksum ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX most_recent_checksum_bitstream on most_recent_checksum(bitstream_id); -UPDATE most_recent_checksum SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE most_recent_checksum.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE most_recent_checksum DROP COLUMN bitstream_legacy_id; - -UPDATE most_recent_checksum SET to_be_processed = '0' WHERE to_be_processed IS NULL; -UPDATE most_recent_checksum SET matched_prev_checksum = '0' WHERE matched_prev_checksum IS NULL; - ---Alter checksum_history -ALTER TABLE checksum_history RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE checksum_history ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX checksum_history_bitstream on checksum_history(bitstream_id); -UPDATE checksum_history SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE checksum_history.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE checksum_history DROP COLUMN bitstream_legacy_id; - -RENAME checksum_history_seq TO checksum_history_check_id_seq; - ---Alter table doi -ALTER TABLE doi ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX doi_object on doi(dspace_object); -UPDATE doi SET dspace_object = (SELECT community.uuid FROM community WHERE doi.resource_id = community.community_id AND doi.resource_type_id = 4) WHERE 
doi.resource_type_id = 4; -UPDATE doi SET dspace_object = (SELECT collection.uuid FROM collection WHERE doi.resource_id = collection.collection_id AND doi.resource_type_id = 3) WHERE doi.resource_type_id = 3; -UPDATE doi SET dspace_object = (SELECT item.uuid FROM item WHERE doi.resource_id = item.item_id AND doi.resource_type_id = 2) WHERE doi.resource_type_id = 2; -UPDATE doi SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE doi.resource_id = bundle.bundle_id AND doi.resource_type_id = 1) WHERE doi.resource_type_id = 1; -UPDATE doi SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE doi.resource_id = bitstream.bitstream_id AND doi.resource_type_id = 0) WHERE doi.resource_type_id = 0; - ---Update table bitstreamformatregistry -UPDATE bitstreamformatregistry SET support_level = -1 WHERE support_level IS NULL; - ---Update table requestitem -UPDATE requestitem SET allfiles = '0' WHERE allfiles IS NULL; -UPDATE requestitem SET accept_request = '0' WHERE accept_request IS NULL; - ---Update table webapp -UPDATE webapp SET isui = -1 WHERE isui IS NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql deleted file mode 100644 index 8f1a7ad157a2..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS_3378 Lost oracle indexes ------------------------------------------------------- -CREATE UNIQUE INDEX eperson_eperson on 
eperson(eperson_id); -CREATE UNIQUE INDEX epersongroup_eperson_group on epersongroup(eperson_group_id); -CREATE UNIQUE INDEX community_community on community(community_id); -CREATE UNIQUE INDEX collection_collection on collection(collection_id); -CREATE UNIQUE INDEX item_item on item(item_id); -CREATE UNIQUE INDEX bundle_bundle on bundle(bundle_id); -CREATE UNIQUE INDEX bitstream_bitstream on bitstream(bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql deleted file mode 100644 index 8ad6f7fcd247..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3024 Invent "permanent" groups ------------------------------------------------------- - -ALTER TABLE epersongroup - ADD (permanent NUMBER(1) DEFAULT 0); -UPDATE epersongroup SET permanent = 1 - WHERE uuid IN ( - SELECT dspace_object_id - FROM metadataschemaregistry s - JOIN metadatafieldregistry f USING (metadata_schema_id) - JOIN metadatavalue v USING (metadata_field_id) - WHERE s.short_id = 'dc' - AND f.element = 'title' - AND f.qualifier IS NULL - AND dbms_lob.compare(v.text_value, 'Administrator') = 0 OR dbms_lob.compare(v.text_value,'Anonymous') = 0 - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql deleted file mode 100644 index 
18cb4a50841d..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3024 extremely slow searching when logged in as admin ---------------------------------------------------------------- --- This script will put the group name on the epersongroup --- record itself for performance reasons. It will also make --- sure that a group name is unique (so that for example no two --- Administrator groups can be created). ---------------------------------------------------------------- - -ALTER TABLE epersongroup -ADD name VARCHAR2(250); - -CREATE UNIQUE INDEX epersongroup_unique_idx_name on epersongroup(name); - -UPDATE epersongroup -SET name = -(SELECT text_value - FROM metadatavalue v - JOIN metadatafieldregistry field on v.metadata_field_id = field.metadata_field_id - JOIN metadataschemaregistry s ON field.metadata_schema_id = s.metadata_schema_id - WHERE s.short_id = 'dc' AND element = 'title' AND qualifier IS NULL - AND v.dspace_object_id = epersongroup.uuid); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql deleted file mode 100644 index e0a103749c2b..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed 
in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1955 resize rpdescription for embargo reason ------------------------------------------------------- - --- We cannot alter type between varchar2 & clob directly so an in between column is required -ALTER TABLE resourcepolicy ADD rpdescription_clob CLOB; -UPDATE resourcepolicy SET rpdescription_clob=rpdescription, rpdescription=null; -ALTER TABLE resourcepolicy DROP COLUMN rpdescription; -ALTER TABLE resourcepolicy RENAME COLUMN rpdescription_clob TO rpdescription; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql deleted file mode 100644 index 7b13d10b6d4f..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql +++ /dev/null @@ -1,46 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3086 OAI Harvesting performance ---------------------------------------------------------------- --- This script will create indexes on the key fields of 
the --- metadataschemaregistry and metadatafieldregistry tables to --- increase the performance of the queries. It will also add --- "ON DELETE CASCADE" to improve the performance of Item deletion. ---------------------------------------------------------------- - -CREATE UNIQUE INDEX metadataschema_idx_short_id on metadataschemaregistry(short_id); - -CREATE INDEX metadatafield_idx_elem_qual on metadatafieldregistry(element, qualifier); - -CREATE INDEX resourcepolicy_idx_rptype on resourcepolicy(rptype); - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE RESOURCEPOLICY ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE RESOURCEPOLICY SET DSPACE_OBJECT_NEW = DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY DROP COLUMN DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT; - -ALTER TABLE RESOURCEPOLICY -ADD CONSTRAINT RESOURCEPOLICY_DSPACE_OBJ_FK -FOREIGN KEY (DSPACE_OBJECT) -REFERENCES dspaceobject(uuid) -ON DELETE CASCADE; - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE METADATAVALUE ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE METADATAVALUE SET DSPACE_OBJECT_NEW = DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE DROP COLUMN DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT_ID; - -ALTER TABLE METADATAVALUE -ADD CONSTRAINT METADATAVALUE_DSPACE_OBJECT_FK -FOREIGN KEY (DSPACE_OBJECT_ID) -REFERENCES DSPACEOBJECT(UUID) -ON DELETE CASCADE; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql deleted file mode 100644 index a1b303f0365a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ 
/dev/null @@ -1,33 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3125 Submitters cannot delete bistreams of workspaceitems ---------------------------------------------------------------- --- This script will add delete rights on all bundles/bitstreams --- for people who already have REMOVE rights. --- In previous versions REMOVE rights was enough to ensure that --- you could delete an object. ---------------------------------------------------------------- -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, -rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) -SELECT -resourcepolicy_seq.nextval AS policy_id, -resource_type_id, -resource_id, --- Insert the Constants.DELETE action -2 AS action_id, -start_date, -end_date, -rpname, -rptype, -rpdescription, -eperson_id, -epersongroup_id, -dspace_object -FROM resourcepolicy WHERE action_id=4 AND (resource_type_id=0 OR resource_type_id=1 OR resource_type_id=2); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql deleted file mode 100644 index 2ba3517e1988..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- 
http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3168 Embargo request Unknown Entity RequestItem ---------------------------------------------------------------- --- convert the item_id and bitstream_id columns from integer to UUID ---------------------------------------------------------------- -ALTER TABLE requestitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE requestitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX requestitem_item on requestitem(item_id); -UPDATE requestitem SET item_id = (SELECT item.uuid FROM item WHERE requestitem.item_legacy_id = item.item_id); -ALTER TABLE requestitem DROP COLUMN item_legacy_id; - -ALTER TABLE requestitem RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE requestitem ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX requestitem_bitstream on requestitem(bitstream_id); -UPDATE requestitem SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE requestitem.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE requestitem DROP COLUMN bitstream_legacy_id; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql deleted file mode 100644 index 74783974468c..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2775 Drop unused sequences ------------------------------------------------------- - -DROP SEQUENCE bitstream_seq; -DROP 
SEQUENCE bundle2bitstream_seq; -DROP SEQUENCE bundle_seq; -DROP SEQUENCE collection2item_seq; -DROP SEQUENCE collection_seq; -DROP SEQUENCE community2collection_seq; -DROP SEQUENCE community2community_seq; -DROP SEQUENCE community_seq; -DROP SEQUENCE dcvalue_seq; -DROP SEQUENCE eperson_seq; -DROP SEQUENCE epersongroup2eperson_seq; -DROP SEQUENCE epersongroup2workspaceitem_seq; -DROP SEQUENCE epersongroup_seq; -DROP SEQUENCE group2group_seq; -DROP SEQUENCE group2groupcache_seq; -DROP SEQUENCE historystate_seq; -DROP SEQUENCE item2bundle_seq; -DROP SEQUENCE item_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql deleted file mode 100644 index 96f125f78b61..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql +++ /dev/null @@ -1,44 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------------------------------------- --- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles --- can be minted from 'handle_seq' ----------------------------------------------------------------------------------- --- Create a new sequence for 'handle_id' column. --- The role of this sequence is to simply provide a unique internal ID to the database. 
-CREATE SEQUENCE handle_id_seq; --- Initialize new 'handle_id_seq' to the maximum value of 'handle_id' -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(handle_id) INTO curr FROM handle; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1); -END; -/ - --- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column, --- as this sequence is used to mint new Handles. --- Code borrowed from update-sequences.sql and updateseq.sql -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$'); - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1); -END; -/ \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql deleted file mode 100644 index e1220c8c7cce..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id 
= item.uuid - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql deleted file mode 100644 index 5c3c3842aaea..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3410 ---------------------------------------------------------------- --- This script will create lost indexes ---------------------------------------------------------------- - -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql deleted file mode 100644 index 47b2d18be8a3..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql +++ /dev/null @@ -1,16 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed 
in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql ------------------------------------------------------- - -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4; -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3; -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2; - \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql deleted file mode 100644 index 30cfae91c83a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------------------------------------------------- --- This adds an extra column to the eperson table where we save a salt for stateless authentication ------------------------------------------------------------------------------------------------------------- -ALTER TABLE eperson ADD session_salt varchar(32); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql deleted file mode 100644 index 68855dc2dc0a..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql +++ /dev/null @@ -1,65 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the dspace 7 entities usage -------------------------------------------------------------- -CREATE SEQUENCE entity_type_id_seq; -CREATE SEQUENCE relationship_type_id_seq; -CREATE SEQUENCE relationship_id_seq; - -CREATE TABLE entity_type -( - id INTEGER NOT NULL PRIMARY KEY, - label varchar(32) UNIQUE NOT NULL -); - -CREATE TABLE relationship_type -( - id INTEGER NOT NULL PRIMARY KEY, - left_type INTEGER NOT NULL, - right_type INTEGER NOT NULL, - left_label varchar(32) NOT NULL, - right_label varchar(32) NOT NULL, - left_min_cardinality INTEGER, - left_max_cardinality INTEGER, - right_min_cardinality INTEGER, - right_max_cardinality INTEGER, - FOREIGN KEY (left_type) REFERENCES entity_type(id), - FOREIGN KEY (right_type) REFERENCES entity_type(id), - CONSTRAINT u_relationship_type_constraint UNIQUE (left_type, right_type, left_label, right_label) - -); - -CREATE TABLE relationship -( - id INTEGER NOT NULL PRIMARY KEY, - left_id uuid NOT NULL REFERENCES item(uuid), - type_id INTEGER NOT NULL REFERENCES relationship_type(id), - right_id uuid NOT NULL REFERENCES item(uuid), - left_place INTEGER, - right_place INTEGER, - CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id) - -); - -CREATE INDEX entity_type_label_idx ON entity_type(label); -CREATE INDEX relationship_type_by_left_type_idx ON relationship_type(left_type); -CREATE INDEX relationship_type_by_right_type_idx ON relationship_type(right_type); -CREATE INDEX relationship_type_by_left_label_idx ON relationship_type(left_label); -CREATE INDEX relationship_type_by_right_label_idx ON relationship_type(right_label); -CREATE INDEX relationship_by_left_id_idx ON relationship(left_id); -CREATE INDEX relationship_by_right_id_idx ON relationship(right_id); \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql deleted file mode 100644 index 68ed690f89e8..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ----------------------------------------------------------------------------------------------------------------- --- This adds TYPE_INHERITED to all old archived items permission due to the change on resource policy management ----------------------------------------------------------------------------------------------------------------- -UPDATE resourcepolicy set rptype = 'TYPE_INHERITED' - where resource_type_id = 2 and rptype is null - and dspace_object in ( - select uuid from item where in_archive = 1 - ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql deleted file mode 100644 index b23170f43732..000000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-4239 Migrate the workflow.xml to spring ---------------------------------------------------------------- --- This script will rename the default workflow "default" name --- to the new "defaultWorkflow" identifier ---------------------------------------------------------------- - -UPDATE cwf_pooltask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; -UPDATE cwf_claimtask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql deleted file mode 100644 index 57f00a993f80..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns leftwardValue and rightwardValue in table relationship --- Rename columns left_label and right_label to leftward_type and rightward_type ------------------------------------------------------------------------------------ - -ALTER TABLE relationship ADD 
leftward_value VARCHAR; -ALTER TABLE relationship ADD rightward_value VARCHAR; - -ALTER TABLE relationship_type RENAME left_label TO leftward_type; -ALTER TABLE relationship_type RENAME right_label TO rightward_type; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql deleted file mode 100644 index a58bbc501dd1..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql +++ /dev/null @@ -1,14 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD copy_to_left BOOLEAN DEFAULT FALSE NOT NULL; -ALTER TABLE relationship_type ADD copy_to_right BOOLEAN DEFAULT FALSE NOT NULL; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql deleted file mode 100644 index a7015e3033bf..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql +++ /dev/null @@ -1,40 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source 
--- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== -CREATE SEQUENCE process_id_seq; - -CREATE TABLE process -( - process_id INTEGER NOT NULL PRIMARY KEY, - user_id RAW(16) NOT NULL, - start_time TIMESTAMP, - finished_time TIMESTAMP, - creation_time TIMESTAMP NOT NULL, - script VARCHAR(256) NOT NULL, - status VARCHAR(32), - parameters VARCHAR(512) -); - -CREATE TABLE process2bitstream -( - process_id INTEGER REFERENCES process(process_id), - bitstream_id RAW(16) REFERENCES bitstream(uuid), - CONSTRAINT PK_process2bitstream PRIMARY KEY (process_id, bitstream_id) -); - -CREATE INDEX process_user_id_idx ON process(user_id); -CREATE INDEX process_status_idx ON process(status); -CREATE INDEX process_name_idx on process(script); -CREATE INDEX process_start_time_idx on process(start_time); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql deleted file mode 100644 index a108fd74b468..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING 
WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the IRUS statistics harvester -------------------------------------------------------------- - -CREATE SEQUENCE openurltracker_seq; - -CREATE TABLE openurltracker -( - tracker_id NUMBER, - tracker_url VARCHAR2(1000), - uploaddate DATE, - CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql deleted file mode 100644 index f71173abe607..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Drop the 'workflowitem' and 'tasklistitem' tables ------------------------------------------------------------------------------------ - -DROP TABLE workflowitem CASCADE CONSTRAINTS; -DROP TABLE tasklistitem CASCADE CONSTRAINTS; - -DROP SEQUENCE workflowitem_seq; -DROP SEQUENCE tasklistitem_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql deleted file mode 100644 index 95d07be477d5..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql +++ /dev/null @@ -1,13 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD tilted INTEGER; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql deleted file mode 100644 index 9c39091f89dc..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql +++ /dev/null @@ -1,56 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- --- Move all 'relationship.type' metadata fields to 'dspace.entity.type'. Remove 'relationship' schema. -------------------------------------------------------------------------------------------------------- --- Special case: we need to the 'dspace' schema to already exist. If users don't already have it we must create it --- manually via SQL, as by default it won't be created until database updates are finished. -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) - SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/dspace' as namespace, 'dspace' as short_id FROM dual - WHERE NOT EXISTS - (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry - WHERE namespace = 'http://dspace.org/dspace' AND short_id = 'dspace'); - - --- Add 'dspace.entity.type' field to registry (if missing) -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace'), 'entity', 'type' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entitye' AND qualifier='type'); - --- Moves all 'relationship.type' field values to a new 'dspace.entity.type' field -UPDATE metadatavalue - SET metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entity' AND qualifier='type') - WHERE metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = 
(SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='relationship') - AND element = 'type' AND qualifier is NULL); - - --- Delete 'relationship.type' field from registry -DELETE FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id = 'relationship') - AND element = 'type' AND qualifier is NULL; - --- Delete 'relationship' schema (which is now empty) -DELETE FROM metadataschemaregistry WHERE short_id = 'relationship' AND namespace = 'http://dspace.org/relationship'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql deleted file mode 100644 index b4d4d755cbe7..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql +++ /dev/null @@ -1,77 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- SQL code to update the ID (primary key) generating sequences, if some --- import operation has set explicit IDs. --- --- Sequences are used to generate IDs for new rows in the database. If a --- bulk import operation, such as an SQL dump, specifies primary keys for --- imported data explicitly, the sequences are out of sync and need updating. --- This SQL code does just that. --- --- This should rarely be needed; any bulk import should be performed using the --- org.dspace.content API which is safe to use concurrently and in multiple --- JVMs. The SQL code below will typically only be required after a direct --- SQL data dump from a backup or somesuch. 
- --- The 'updateseq' procedure was derived from incseq.sql found at: --- http://www.akadia.com/services/scripts/incseq.sql - -DECLARE - PROCEDURE updateseq ( seq IN VARCHAR, - tbl IN VARCHAR, - attr IN VARCHAR, - cond IN VARCHAR DEFAULT '' ) IS - curr NUMBER := 0; - BEGIN - EXECUTE IMMEDIATE 'SELECT max(' || attr - || ') FROM ' || tbl - || ' ' || cond - INTO curr; - curr := curr + 1; - EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq; - EXECUTE IMMEDIATE 'CREATE SEQUENCE ' - || seq - || ' START WITH ' - || NVL(curr, 1); - END updateseq; - -BEGIN - updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry', - 'bitstream_format_id'); - updateseq('fileextension_seq', 'fileextension', 'file_extension_id'); - updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id'); - updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id'); - updateseq('registrationdata_seq', 'registrationdata', - 'registrationdata_id'); - updateseq('subscription_seq', 'subscription', 'subscription_id'); - updateseq('metadatafieldregistry_seq', 'metadatafieldregistry', - 'metadata_field_id'); - updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id'); - updateseq('metadataschemaregistry_seq', 'metadataschemaregistry', - 'metadata_schema_id'); - updateseq('harvested_collection_seq', 'harvested_collection', 'id'); - updateseq('harvested_item_seq', 'harvested_item', 'id'); - updateseq('webapp_seq', 'webapp', 'webapp_id'); - updateseq('requestitem_seq', 'requestitem', 'requestitem_id'); - updateseq('handle_id_seq', 'handle', 'handle_id'); - - -- Handle Sequence is a special case. Since Handles minted by DSpace - -- use the 'handle_seq', we need to ensure the next assigned handle - -- will *always* be unique. So, 'handle_seq' always needs to be set - -- to the value of the *largest* handle suffix. That way when the - -- next handle is assigned, it will use the next largest number. 
This - -- query does the following: - -- For all 'handle' values which have a number in their suffix - -- (after '/'), find the maximum suffix value, convert it to a - -- number, and set the 'handle_seq' to start at the next value (see - -- updateseq above for more). - updateseq('handle_seq', 'handle', - q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}', - q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}'); -END; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md index 72eb279912b5..e16e4c6d4c91 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md @@ -3,8 +3,9 @@ The SQL scripts in this directory are PostgreSQL-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. @@ -22,7 +23,7 @@ Please see the Flyway Documentation for more information: http://flywaydb.org/ The `update-sequences.sql` script in this directory may still be used to update your internal database counts if you feel they have gotten out of "sync". This may sometimes occur after large restores of content (e.g. 
when using the DSpace -[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore) +[AIP Backup and Restore](https://wiki.lyrasis.org/display/DSDOC7x/AIP+Backup+and+Restore) feature). This `update-sequences.sql` script can be executed by running diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql new file mode 100644 index 000000000000..5a6abda04101 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql @@ -0,0 +1,28 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- =============================================================== +-- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING +-- +-- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED +-- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
+-- http://flywaydb.org/ +-- =============================================================== + +------------------------------------------------------------------------------------------------------- +------------------------------------------------------------------------------------------------------- +UPDATE metadatavalue SET dspace_object_id = (SELECT uuid + FROM collection + WHERE template_item_id = dspace_object_id) +WHERE dspace_object_id IN (SELECT template_item_id + FROM Collection) + AND metadata_field_id + IN (SELECT metadata_field_id + FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr + ON mfr.metadata_schema_id = msr.metadata_schema_id + WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql new file mode 100644 index 000000000000..ae8f1e7ef5d2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql @@ -0,0 +1,15 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------------- +---- ALTER table collection +------------------------------------------------------------------------------------- + +ALTER TABLE collection DROP COLUMN workflow_step_1; +ALTER TABLE collection DROP COLUMN workflow_step_2; +ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql new file mode 100644 index 000000000000..5523a7e7ecc1 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql @@ -0,0 +1,21 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +---------------------------------------------------- +-- Make sure the metadatavalue.place column starts at 0 instead of 1 +---------------------------------------------------- +UPDATE metadatavalue AS mdv +SET place = mdv.place - minplace +FROM ( + SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace + FROM metadatavalue + GROUP BY dspace_object_id, metadata_field_id + ) AS mp +WHERE mdv.dspace_object_id = mp.dspace_object_id + AND mdv.metadata_field_id = mp.metadata_field_id + AND minplace > 0; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.04.29__orcid_queue_and_history.sql new file mode 100644 index 000000000000..303160251568 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.04.29__orcid_queue_and_history.sql @@ -0,0 +1,54 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + 
+----------------------------------------------------------------------------------- +-- Create tables for ORCID Queue and History +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_queue_id_seq; + +CREATE TABLE orcid_queue +( + id INTEGER NOT NULL, + owner_id uuid NOT NULL, + entity_id uuid, + attempts INTEGER, + put_code CHARACTER VARYING(255), + record_type CHARACTER VARYING(255), + description CHARACTER VARYING(255), + operation CHARACTER VARYING(255), + metadata TEXT, + CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), + CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); + + +CREATE SEQUENCE orcid_history_id_seq; + +CREATE TABLE orcid_history +( + id INTEGER NOT NULL, + owner_id uuid NOT NULL, + entity_id uuid, + put_code CHARACTER VARYING(255), + timestamp_last_attempt TIMESTAMP, + response_message text, + status INTEGER, + metadata TEXT, + operation CHARACTER VARYING(255), + record_type CHARACTER VARYING(255), + description CHARACTER VARYING(255), + CONSTRAINT orcid_history_pkey PRIMARY KEY (id), + CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), + CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) +); + +CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.05.16__Orcid_token_table.sql new file mode 100644 index 000000000000..6c3793d42213 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.05.16__Orcid_token_table.sql @@ -0,0 +1,24 
@@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create table for ORCID access tokens +----------------------------------------------------------------------------------- + +CREATE SEQUENCE orcid_token_id_seq; + +CREATE TABLE orcid_token +( + id INTEGER NOT NULL, + eperson_id uuid NOT NULL UNIQUE, + profile_item_id uuid, + access_token VARCHAR(100) NOT NULL, + CONSTRAINT orcid_token_pkey PRIMARY KEY (id), + CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), + CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.16__process_to_group.sql new file mode 100644 index 000000000000..0e7d417ae52d --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.16__process_to_group.sql @@ -0,0 +1,18 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store Groups related to a Process on its creation +------------------------------------------------------------------------------- + +CREATE TABLE Process2Group +( + process_id INTEGER REFERENCES Process(process_id), + group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, + CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) +); \ No 
newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql new file mode 100644 index 000000000000..7bf3948d3a63 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) +ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql new file mode 100644 index 000000000000..61e01494fcb3 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql @@ -0,0 +1,43 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- ADD table subscription_parameter 
+----------------------------------------------------------------------------------- + + +CREATE SEQUENCE if NOT EXISTS subscription_parameter_seq; +----------------------------------------------------------------------------------- +-- ADD table subscription_parameter +----------------------------------------------------------------------------------- +CREATE TABLE if NOT EXISTS subscription_parameter +( + subscription_parameter_id INTEGER NOT NULL, + name CHARACTER VARYING(255), + value CHARACTER VARYING(255), + subscription_id INTEGER NOT NULL, + CONSTRAINT subscription_parameter_pkey PRIMARY KEY (subscription_parameter_id), + CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) REFERENCES subscription (subscription_id) ON DELETE CASCADE +); +-- +ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; +-- -- +ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); +---- -- +ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; +ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); +-- +UPDATE subscription SET dspace_object_id = collection_id , type = 'content'; +-- +ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_collection_id_fkey; +-- -- +ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; +-- -- +INSERT INTO subscription_parameter (subscription_parameter_id, name, value, subscription_id) +SELECT getnextid('subscription_parameter'), 'frequency', 'D', subscription_id from "subscription" ; + diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.06__index_action_resource_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.06__index_action_resource_policy.sql new file mode 100644 index 000000000000..696e84433dcd --- /dev/null +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.06__index_action_resource_policy.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +CREATE INDEX resourcepolicy_action_idx ON resourcepolicy(action_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.09__Supervision_Orders_table.sql new file mode 100644 index 000000000000..f27a4f2a1bb6 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.09__Supervision_Orders_table.sql @@ -0,0 +1,85 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +------------------------------------------------------------------------------- +-- Table to store supervision orders +------------------------------------------------------------------------------- + +CREATE TABLE supervision_orders +( + id INTEGER PRIMARY KEY, + item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, + eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE +); + +CREATE SEQUENCE supervision_orders_seq; + +------------------------------------------------------------------------------- +-- migrate data from epersongroup2workspaceitem table +------------------------------------------------------------------------------- + +INSERT INTO supervision_orders (id, item_id, eperson_group_id) +SELECT getnextid('supervision_orders') AS id, w.item_id, e.uuid +FROM epersongroup2workspaceitem ew INNER JOIN workspaceitem w +ON 
ew.workspace_item_id = w.workspace_item_id +INNER JOIN epersongroup e +ON ew.eperson_group_id = e.uuid; + + +-- UPDATE policies for supervision orders +-- items, bundles and bitstreams + +do +$$ +DECLARE +rec record; +BEGIN + +FOR rec IN + +SELECT so.item_id as dspace_object, so.eperson_group_id, rp.resource_type_id +FROM supervision_orders so +INNER JOIN RESOURCEPOLICY rp on so.item_id = rp.dspace_object +AND so.eperson_group_id = rp.epersongroup_id +WHERE rp.rptype IS NULL + +UNION + +SELECT ib.bundle_id as dspace_object, so.eperson_group_id, rp.resource_type_id +FROM supervision_orders so +INNER JOIN item2bundle ib ON so.item_id = ib.item_id +INNER JOIN RESOURCEPOLICY rp on ib.bundle_id = rp.dspace_object +AND so.eperson_group_id = rp.epersongroup_id +WHERE rp.rptype IS NULL + +UNION + +SELECT bs.bitstream_id as dspace_object, so.eperson_group_id, rp.resource_type_id +FROM supervision_orders so +INNER JOIN item2bundle ib ON so.item_id = ib.item_id +INNER JOIN bundle2bitstream bs ON ib.bundle_id = bs.bundle_id +INNER JOIN RESOURCEPOLICY rp on bs.bitstream_id = rp.dspace_object +AND so.eperson_group_id = rp.epersongroup_id +WHERE rp.rptype IS NULL + +LOOP + +UPDATE RESOURCEPOLICY SET rptype = 'TYPE_SUBMISSION' +where dspace_object = rec.dspace_object +AND epersongroup_id = rec.eperson_group_id +AND rptype IS NULL; + +END LOOP; +END; +$$; + +------------------------------------------------------------------------------- +-- drop epersongroup2workspaceitem table +------------------------------------------------------------------------------- + +DROP TABLE epersongroup2workspaceitem; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.15__system_wide_alerts.sql new file mode 100644 index 000000000000..9d13138fdada --- /dev/null +++ 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.5_2022.12.15__system_wide_alerts.sql @@ -0,0 +1,22 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Create table for System wide alerts +----------------------------------------------------------------------------------- + +CREATE SEQUENCE alert_id_seq; + +CREATE TABLE systemwidealert +( + alert_id INTEGER NOT NULL PRIMARY KEY, + message VARCHAR(512), + allow_sessions VARCHAR(64), + countdown_to TIMESTAMP, + active BOOLEAN +); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql new file mode 100644 index 000000000000..e4544e1de729 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.17__Remove_unused_sequence.sql @@ -0,0 +1,13 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Drop the 'history_seq' sequence (related table deleted in DSpace 1.5) +----------------------------------------------------------------------------------- + +DROP SEQUENCE IF EXISTS history_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql new file mode 100644 index 000000000000..8aec44a7f6f2 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql @@ -0,0 +1,17 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +----------------------------------------------------------------------------------- +-- Update short description for PNG mimetype in the bitstream format registry +-- See: https://github.com/DSpace/DSpace/pull/8722 +----------------------------------------------------------------------------------- + +UPDATE bitstreamformatregistry +SET short_description='PNG' +WHERE short_description='image/png' + AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql new file mode 100644 index 000000000000..ae0e414e4440 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql @@ -0,0 +1,10 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE orcid_history ALTER COLUMN description TYPE TEXT; +ALTER TABLE orcid_queue ALTER COLUMN description TYPE TEXT; diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql new file mode 100644 index 000000000000..f7e0e51d0bf7 --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.04.19__process_parameters_to_text_type.sql @@ -0,0 +1,9 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +ALTER TABLE process ALTER COLUMN parameters TYPE TEXT; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql new file mode 100644 index 000000000000..9dd2f54a43eb --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V7.6_2023.10.12__Fix-deleted-primary-bitstreams.sql @@ -0,0 +1,34 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +BEGIN; + +-- Unset any primary bitstream that is marked as deleted +UPDATE bundle +SET primary_bitstream_id = NULL +WHERE primary_bitstream_id IN + ( SELECT bs.uuid + FROM bitstream AS bs + INNER JOIN bundle as bl ON bs.uuid = bl.primary_bitstream_id + WHERE bs.deleted IS TRUE ); + +-- Unset any primary bitstream that don't belong to bundle's bitstream list +UPDATE bundle +SET primary_bitstream_id = NULL +WHERE primary_bitstream_id IN + ( SELECT bl.primary_bitstream_id + FROM bundle as bl + WHERE 
bl.primary_bitstream_id IS NOT NULL + AND bl.primary_bitstream_id NOT IN + ( SELECT bitstream_id + FROM bundle2bitstream AS b2b + WHERE b2b.bundle_id = bl.uuid + ) + ); + +COMMIT; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V8.0_2023.08.07__qaevent_processed.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V8.0_2023.08.07__qaevent_processed.sql new file mode 100644 index 000000000000..5c3f0fac73ce --- /dev/null +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/V8.0_2023.08.07__qaevent_processed.sql @@ -0,0 +1,19 @@ +-- +-- The contents of this file are subject to the license and copyright +-- detailed in the LICENSE and NOTICE files at the root of the source +-- tree and available online at +-- +-- http://www.dspace.org/license/ +-- + +CREATE TABLE qaevent_processed ( + qaevent_id VARCHAR(255) NOT NULL, + qaevent_timestamp TIMESTAMP NULL, + eperson_uuid UUID NULL, + item_uuid UUID NULL, + CONSTRAINT qaevent_pk PRIMARY KEY (qaevent_id), + CONSTRAINT eperson_uuid_fkey FOREIGN KEY (eperson_uuid) REFERENCES eperson (uuid), + CONSTRAINT item_uuid_fkey FOREIGN KEY (item_uuid) REFERENCES item (uuid) +); + +CREATE INDEX item_uuid_idx ON qaevent_processed(item_uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql index 749f82382c9d..f96434f1ba8c 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/update-sequences.sql @@ -19,21 +19,41 @@ -- JVMs. The SQL code below will typically only be required after a direct -- SQL data dump from a backup or somesuch. 
- +SELECT setval('alert_id_seq', max(alert_id)) FROM systemwidealert; SELECT setval('bitstreamformatregistry_seq', max(bitstream_format_id)) FROM bitstreamformatregistry; +SELECT setval('checksum_history_check_id_seq', max(check_id)) FROM checksum_history; +SELECT setval('cwf_claimtask_seq', max(claimtask_id)) FROM cwf_claimtask; +SELECT setval('cwf_collectionrole_seq', max(collectionrole_id)) FROM cwf_collectionrole; +SELECT setval('cwf_in_progress_user_seq', max(in_progress_user_id)) FROM cwf_in_progress_user; +SELECT setval('cwf_pooltask_seq', max(pooltask_id)) FROM cwf_pooltask; +SELECT setval('cwf_workflowitem_seq', max(workflowitem_id)) FROM cwf_workflowitem; +SELECT setval('cwf_workflowitemrole_seq', max(workflowitemrole_id)) FROM cwf_workflowitemrole; +SELECT setval('doi_seq', max(doi_id)) FROM doi; +SELECT setval('entity_type_id_seq', max(id)) FROM entity_type; SELECT setval('fileextension_seq', max(file_extension_id)) FROM fileextension; -SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; -SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; -SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; -SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; -SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; -SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM metadataschemaregistry; +SELECT setval('handle_id_seq', max(handle_id)) FROM handle; SELECT setval('harvested_collection_seq', max(id)) FROM harvested_collection; SELECT setval('harvested_item_seq', max(id)) FROM harvested_item; -SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('metadatafieldregistry_seq', max(metadata_field_id)) FROM metadatafieldregistry; +SELECT setval('metadataschemaregistry_seq', max(metadata_schema_id)) FROM 
metadataschemaregistry; +SELECT setval('metadatavalue_seq', max(metadata_value_id)) FROM metadatavalue; +SELECT setval('openurltracker_seq', max(tracker_id)) FROM openurltracker; +SELECT setval('orcid_history_id_seq', max(id)) FROM orcid_history; +SELECT setval('orcid_queue_id_seq', max(id)) FROM orcid_queue; +SELECT setval('orcid_token_id_seq', max(id)) FROM orcid_token; +SELECT setval('process_id_seq', max(process_id)) FROM process; +SELECT setval('registrationdata_seq', max(registrationdata_id)) FROM registrationdata; +SELECT setval('relationship_id_seq', max(id)) FROM relationship; +SELECT setval('relationship_type_id_seq', max(id)) FROM relationship_type; SELECT setval('requestitem_seq', max(requestitem_id)) FROM requestitem; -SELECT setval('handle_id_seq', max(handle_id)) FROM handle; +SELECT setval('resourcepolicy_seq', max(policy_id)) FROM resourcepolicy; +SELECT setval('subscription_parameter_seq', max(subscription_id)) FROM subscription_parameter; +SELECT setval('subscription_seq', max(subscription_id)) FROM subscription; +SELECT setval('supervision_orders_seq', max(id)) FROM supervision_orders; +SELECT setval('versionhistory_seq', max(versionhistory_id)) FROM versionhistory; +SELECT setval('versionitem_seq', max(versionitem_id)) FROM versionitem; +SELECT setval('webapp_seq', max(webapp_id)) FROM webapp; +SELECT setval('workspaceitem_seq', max(workspace_item_id)) FROM workspaceitem; -- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq', -- we need to ensure the next assigned handle will *always* be unique. 
So, 'handle_seq' diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql deleted file mode 100644 index 9bca3a17c99e..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - 
collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND 
epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, 
resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - 
i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS 
action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - 
resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND 
mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN 
metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN 
bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql deleted file mode 100644 index 917078594cfa..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql +++ /dev/null @@ -1,37 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- --- Alter workflow item -ALTER TABLE workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE workflowitem SET item_id = (SELECT item.uuid FROM item WHERE workflowitem.item_legacy_id = item.item_id); -ALTER TABLE workflowitem DROP COLUMN 
item_legacy_id; - --- Migrate task list item -ALTER TABLE TasklistItem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE TasklistItem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -UPDATE TasklistItem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE TasklistItem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE TasklistItem DROP COLUMN eperson_legacy_id; - --- Migrate task workflow item -ALTER TABLE workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workflowitem DROP COLUMN collection_legacy_id; -ALTER TABLE workflowitem RENAME COLUMN owner to owner_legacy_id; -ALTER TABLE workflowitem ADD owner RAW(16) REFERENCES EPerson (uuid); -UPDATE workflowitem SET owner = (SELECT eperson.uuid FROM eperson WHERE workflowitem.owner_legacy_id = eperson.eperson_id); -ALTER TABLE workflowitem DROP COLUMN owner_legacy_id; -UPDATE workflowitem SET state = -1 WHERE state IS NULL; -UPDATE workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql deleted file mode 100644 index b3887a5af4d1..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and 
NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM 
resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ----------------------------------------------------------------------------------- --- grant 
read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS 
eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND dspace_object = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 
AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR 
wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND 
mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM 
workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, 
eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM 
resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql deleted file mode 100644 index 7a992836eea6..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql +++ /dev/null @@ -1,141 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -UPDATE collection SET workflow_step_1 = null; -UPDATE collection SET workflow_step_2 = null; -UPDATE collection SET workflow_step_3 = null; - --- cwf_workflowitem - -DROP INDEX cwf_workflowitem_coll_fk_idx; - -ALTER TABLE cwf_workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE cwf_workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE cwf_workflowitem SET item_id = (SELECT item.uuid FROM item WHERE cwf_workflowitem.item_legacy_id = item.item_id); -ALTER TABLE cwf_workflowitem DROP COLUMN item_legacy_id; - -ALTER TABLE cwf_workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_workflowitem DROP 
COLUMN collection_legacy_id; - -UPDATE cwf_workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE cwf_workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE cwf_workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - --- cwf_collectionrole - -ALTER TABLE cwf_collectionrole DROP CONSTRAINT cwf_collectionrole_unique; -DROP INDEX cwf_cr_coll_role_fk_idx; -DROP INDEX cwf_cr_coll_fk_idx; - -ALTER TABLE cwf_collectionrole RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_collectionrole ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_collectionrole SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_collectionrole.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_collectionrole DROP COLUMN collection_legacy_id; - -ALTER TABLE cwf_collectionrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_collectionrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_collectionrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_collectionrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_collectionrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - --- cwf_workflowitemrole - -ALTER TABLE cwf_workflowitemrole DROP CONSTRAINT cwf_workflowitemrole_unique; -DROP INDEX cwf_wfir_item_role_fk_idx; -DROP INDEX cwf_wfir_item_fk_idx; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_workflowitemrole SET group_id = (SELECT 
epersongroup.uuid FROM epersongroup WHERE cwf_workflowitemrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_workflowitemrole SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_workflowitemrole.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN eperson_legacy_id; - - -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - --- cwf_pooltask - -DROP INDEX cwf_pt_eperson_fk_idx; -DROP INDEX cwf_pt_workflow_eperson_fk_idx; - -ALTER TABLE cwf_pooltask RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_pooltask ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_pooltask SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_pooltask.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_pooltask DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_pooltask RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_pooltask ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_pooltask SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_pooltask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_pooltask DROP COLUMN eperson_legacy_id; - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - --- cwf_claimtask - -ALTER TABLE cwf_claimtask DROP CONSTRAINT cwf_claimtask_unique; -DROP INDEX cwf_ct_workflow_fk_idx; -DROP INDEX cwf_ct_workflow_eperson_fk_idx; -DROP INDEX 
cwf_ct_eperson_fk_idx; -DROP INDEX cwf_ct_wfs_fk_idx; -DROP INDEX cwf_ct_wfs_action_fk_idx; -DROP INDEX cwf_ct_wfs_action_e_fk_idx; - -ALTER TABLE cwf_claimtask RENAME COLUMN owner_id to eperson_legacy_id; -ALTER TABLE cwf_claimtask ADD owner_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_claimtask SET owner_id = (SELECT eperson.uuid FROM eperson WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id; - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - --- cwf_in_progress_user - -ALTER TABLE cwf_in_progress_user DROP CONSTRAINT cwf_in_progress_user_unique; -DROP INDEX cwf_ipu_workflow_fk_idx; -DROP INDEX cwf_ipu_eperson_fk_idx; - -ALTER TABLE cwf_in_progress_user RENAME COLUMN user_id to eperson_legacy_id; -ALTER TABLE cwf_in_progress_user ADD user_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_in_progress_user SET user_id = (SELECT eperson.uuid FROM eperson WHERE cwf_in_progress_user.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_in_progress_user DROP COLUMN eperson_legacy_id; -UPDATE cwf_in_progress_user SET finished = '0' WHERE finished IS NULL; - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); \ No 
newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql deleted file mode 100644 index 0402fc994887..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql +++ /dev/null @@ -1,27 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- UPDATE policies for claimtasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id JOIN item ON cwf_workflowitem.item_id = item.uuid) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT item2bundle.bundle_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT bundle2bitstream.bitstream_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON 
item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Create policies for pooled tasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql deleted file mode 100644 index f582f37c6931..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and 
copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS 
workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS 
step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE resource_type_id = 2 AND resource_id IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 1 AND resource_id IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 0 AND resource_id IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! 
--- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, 
epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id); - -INSERT INTO resourcepolicy (policy_id, 
resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql deleted file mode 100644 index 70eb419d8fbb..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - 
-INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 
'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! 
--- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, 
action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid); - -INSERT INTO 
resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql deleted file mode 100644 index 541af73dfe01..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0) --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id RAW(16) REFERENCES item(uuid) UNIQUE, - collection_id RAW(16) REFERENCES collection(uuid), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE 
cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id RAW(16) REFERENCES collection(uuid), -group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id RAW(16) REFERENCES eperson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id RAW(16) REFERENCES EPerson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id RAW(16) REFERENCES eperson(uuid) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, 
action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id RAW(16) REFERENCES eperson(uuid), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql deleted file mode 100644 index f8f0e564e824..000000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. 
That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES item(item_id) UNIQUE, - collection_id INTEGER REFERENCES collection(collection_id), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id integer REFERENCES collection(collection_id), -group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id integer REFERENCES eperson(eperson_id), - group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, 
workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id INTEGER REFERENCES EPerson(eperson_id), - group_id INTEGER REFERENCES epersongroup(eperson_group_id) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id integer REFERENCES eperson(eperson_id) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id integer REFERENCES eperson(eperson_id), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT 
cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index 5e69ee9c4282..6b0ef3e9b9e3 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -34,6 +34,14 @@ + + + + + + @@ -43,12 +51,13 @@ class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping"> - - + + + xml @@ -56,7 +65,6 @@ - @@ -115,6 +123,77 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml deleted file mode 100644 index b9c11f8164d6..000000000000 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-requestitem-services.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml deleted file mode 100644 index c8197970a971..000000000000 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - dc.identifier.issn - - - - - - - - - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml index 6fe8ddb07bec..76891d169c97 100644 --- 
a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml @@ -25,10 +25,38 @@ - + + + + + + + + dc.identifier.issn + + + + + + + + + + + + diff --git a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml index 6dcaa43b08c8..3ce641d99c34 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-core-services.xml @@ -13,15 +13,6 @@ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> - - - - @@ -31,18 +22,13 @@ - - - - - + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 07aa36fb2bd6..452460501a54 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -20,6 +20,10 @@ + + + + @@ -51,7 +55,6 @@ org.dspace.app.rest.submit.step.CollectionStep collection - submission submit.progressbar.describe.stepone @@ -75,6 +78,28 @@ submission + + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + + + submit.progressbar.accessCondition + org.dspace.app.rest.submit.step.AccessConditionStep + accessCondition + + + + submit.progressbar.accessCondition + org.dspace.app.rest.submit.step.AccessConditionStep + accessCondition + + @@ -138,6 +203,8 @@ + + @@ -146,6 +213,8 @@ + + @@ -167,6 +236,27 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 74dfe082ddc7..05a4cc5add01 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ 
b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -43,7 +43,7 @@ dspace.server.url = http://localhost db.driver = org.h2.Driver db.dialect=org.hibernate.dialect.H2Dialect # Use a 10 second database lock timeout to avoid occasional JDBC lock timeout errors -db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000; +db.url = jdbc:h2:mem:test;LOCK_TIMEOUT=10000;NON_KEYWORDS=VALUE;TIME ZONE=UTC db.username = sa db.password = # H2's default schema is PUBLIC @@ -70,6 +70,17 @@ mail.server.disabled = true # (Defaults to a dummy/fake prefix of 123456789) handle.prefix = 123456789 +# Whether to enable the DSpace handle resolver endpoints necessary for +# https://github.com/DSpace/Remote-Handle-Resolver +# Defaults to "false" which means these handle resolver endpoints are not available. +handle.remote-resolver.enabled = true + +# Whether to enable the DSpace listhandles resolver that lists all available +# handles for this DSpace installation. +# Defaults to "false" which means is possible to obtain the list of handles +# of this DSpace installation, whenever the `handle.remote-resolver.enabled = true`. 
+handle.hide.listhandles = false + ##################### # LOGLEVEL SETTINGS # ##################### @@ -80,13 +91,18 @@ loglevel.dspace = INFO # loglevel.dspace: Log level for all DSpace-specific code (org.dspace.*) # Possible values (from most to least info): DEBUG, INFO, WARN, ERROR, FATAL +######################## +# IIIF TEST SETTINGS # +######################## +iiif.enabled = true +event.dispatcher.default.consumers = versioning, discovery, eperson, orcidqueue, iiif, qaeventsdelete ########################################### # CUSTOM UNIT / INTEGRATION TEST SETTINGS # ########################################### # custom dispatcher to be used by dspace-api IT that doesn't need SOLR event.dispatcher.exclude-discovery.class = org.dspace.event.BasicDispatcher -event.dispatcher.exclude-discovery.consumers = versioning, eperson +event.dispatcher.exclude-discovery.consumers = versioning, eperson, qaeventsdelete # Configure authority control for Unit Testing (in DSpaceControlledVocabularyTest) # (This overrides default, commented out settings in dspace.cfg) @@ -139,3 +155,22 @@ authentication-ip.Student = 6.6.6.6 useProxies = true proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.include_ui_ip = true + +csvexport.dir = dspace-server-webapp/src/test/data/dspaceFolder/exports + +# For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN +management.health.solrOai.enabled = false + +# Enable researcher profiles and orcid synchronization for tests +researcher-profile.entity-type = Person +orcid.synchronization-enabled = true + +# Configuration settings required for Researcher Profiles +# These settings ensure "dspace.object.owner" field are indexed by Authority Control +choices.plugin.dspace.object.owner = EPersonAuthority +choices.presentation.dspace.object.owner = suggest +authority.controlled.dspace.object.owner = true + +# Configuration required for thorough testing of browse links 
+webui.browse.link.1 = author:dc.contributor.* +webui.browse.link.2 = subject:dc.subject.* diff --git a/dspace-api/src/test/data/dspaceFolder/config/modules/identifiers.cfg b/dspace-api/src/test/data/dspaceFolder/config/modules/identifiers.cfg new file mode 100644 index 000000000000..64512572ff73 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/modules/identifiers.cfg @@ -0,0 +1,49 @@ +#----------------------------------------------------------------------# +#---------------------IDENTIFIER CONFIGURATIONS------------------------# +#----------------------------------------------------------------------# +# These configs are used for additional identifier configuration such # +# as the Show Identifiers step which can "pre-mint" DOIs and Handles # +#----------------------------------------------------------------------# + +# Should configured identifiers (eg handle and DOI) be minted for (future) registration at workspace item creation? +# A handle created at this stage will act just like a regular handle created at archive time. +# A DOI created at this stage will be in a 'PENDING' status while in workspace and workflow. +# At the time of item install, the DOI filter (if any) will be applied and if the item matches the filter, the DOI +# status will be updated to TO_BE_REGISTERED. An administrator can also manually progress the DOI status, overriding +# any filters, in the item status page. +# This option doesn't require the Show Identifiers submission step to be visible. +# Default: false +identifiers.submission.register = false + +# This configuration property can be set to a filter name to determine if a PENDING DOI for an item +# should be queued for registration. If the filter doesn't match, the DOI will stay in PENDING or MINTED status +# so that the identifier itself persists in case it is considered for registration in the future. +# See doi-filter and other example filters in item-filters.xml. 
+# Default (always_true_filter) +identifiers.submission.filter.install = doi_filter + +# This optional configuration property can be set to a filter name, in case there are some initial rules to apply +# when first deciding whether a DOI should be be created for a new workspace item with a PENDING status. +# This filter is only applied if identifiers.submission.register is true. +# This filter is updated as submission data is saved. +# Default: (always_true_filter) +identifiers.submission.filter.workspace = doi_filter + +# If true, the workspace filter will be applied as submission data is saved. If the filter no longer +# matches the item, the DOI will be shifted into a MINTED status and not displayed in the submission section. +# If false, then once a DOI has been created with PENDING status it will remain that way until final item install +# Default: true +#identifiers.submission.strip_pending_during_submission = true + +# This configuration property can be set to a filter name to determine if an item processed by RegisterDOI curation +# task should be eligible for a DOI +identifiers.submission.filter.curation = always_true_filter + +# Show Register DOI button in item status page? +# Default: false +identifiers.item-status.register-doi = true + +# Which identifier types to show in submission step? 
+# Default: handle, doi (currently the only supported identifier 'types') +identifiers.submission.display = handle +identifiers.submission.display = doi \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml index d65167a56289..a9af7c66f5e8 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/access-conditions.xml @@ -3,10 +3,9 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd"> - + - - + @@ -18,7 +17,7 @@ - + @@ -31,7 +30,7 @@ - + @@ -43,13 +42,13 @@ - + @@ -60,4 +59,61 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml new file mode 100644 index 000000000000..8a5277ab2dac --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-openaire.xml @@ -0,0 +1,89 @@ + + + + + + + + + + + + + + + + + Project + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml index 590fd57cb694..37e1fb508953 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml @@ -6,6 +6,8 @@ + + @@ -13,64 +15,92 @@ - - - - - - + + + + Journal + - - - - - - + + + + Journal + - - - - - - + + 
+ + OrgUnit + - - - - + + + + + + + Person + + - + + + + + + + + + Publication + + - + + + + Publication + none + + - - - - + + + + + - xml + Publication + + + + + + + Publication + none + + + - diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service-test.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service-test.xml deleted file mode 100644 index 206b801d0842..000000000000 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service-test.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml new file mode 100644 index 000000000000..8f7cc297d719 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/identifier-service.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml new file mode 100644 index 000000000000..85e49239156c --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/iiif-processing.xml @@ -0,0 +1,11 @@ + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/item-filters.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/item-filters.xml new file mode 100644 index 000000000000..8bae32eaefd7 --- /dev/null +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/item-filters.xml @@ -0,0 +1,370 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + article$ + bachelorThesis$ + masterThesis$ + doctoralThesis$ + book$ + bookPart$ + review$ + conferenceObject$ + lecture$ + workingPaper$ + preprint$ + report$ + 
annotation$ + contributionToPeriodical$ + patent$ + dataset$ + other$ + + + + + + + + + + + + + 123456789/20 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 123456789/3 + 123456789/4 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml index 36e425ba6b0e..a197b2910bd6 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml @@ -22,6 +22,11 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml index fb9e31b9a006..206326f3db70 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml @@ -33,4 +33,18 @@ + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml index 5f86c7359890..29703e3ee07a 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml @@ -47,5 +47,12 @@ + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml deleted file mode 
100644 index adb2340f10c7..000000000000 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml index 318d1ad3d754..0d074362279e 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow-actions.xml @@ -15,9 +15,12 @@ - + + + + - + @@ -63,6 +66,12 @@ + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml index 6e987ae8b0f6..a83be3fa339b 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/workflow.xml @@ -153,6 +153,7 @@ + diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index f3cc2d20dcb5..6b7349616e2d 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -140,6 +140,7 @@ ispartofseries true + Technical Report series Enter the series and number assigned to this item by your community. @@ -286,6 +287,91 @@ it, please enter the types and the actual numbers or codes. +
    + + + dc + identifier + + true + + qualdrop_value + If the item has any identification numbers or codes associated with + it, please enter the types and the actual numbers or codes. + please give an identifier + + +
    + +
    + + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + You must enter at least the year. + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. + + + + + + + dc + identifier + isbn + true + + Book + onebox + Enter the ISBN of the book. + An ISBN is required. + + + + dc + identifier + isbn + true + + Book chapter + onebox + Enter the ISBN of the book in which this chapter appears. + + + +
    +
    @@ -350,6 +436,35 @@ it, please enter the types and the actual numbers or codes.
    + +
    + + + dc + title + + false + + onebox + Field required + + +
    + +
    + + + dc + type + + false + + onebox + Field required + + +
    + diff --git a/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java b/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java index 1abc4e017d14..5a5ce8bf6d4c 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java +++ b/dspace-api/src/test/java/org/dspace/AbstractDSpaceIntegrationTest.java @@ -18,6 +18,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.builder.AbstractBuilder; +import org.dspace.discovery.SearchUtils; import org.dspace.servicemanager.DSpaceKernelImpl; import org.dspace.servicemanager.DSpaceKernelInit; import org.junit.AfterClass; @@ -104,6 +105,7 @@ public static void destroyTestEnvironment() throws SQLException { // Unload DSpace services AbstractBuilder.destroy(); + SearchUtils.clearCachedSearchService(); // NOTE: We explicitly do NOT stop/destroy the kernel, as it is cached // in the Spring ApplicationContext. By default, to speed up tests, diff --git a/dspace-api/src/test/java/org/dspace/AbstractDSpaceTest.java b/dspace-api/src/test/java/org/dspace/AbstractDSpaceTest.java index e53d6a675d3e..36477556d3de 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractDSpaceTest.java +++ b/dspace-api/src/test/java/org/dspace/AbstractDSpaceTest.java @@ -77,7 +77,7 @@ public static void initKernel() { //load the properties of the tests testProps = new Properties(); - URL properties = AbstractUnitTest.class.getClassLoader() + URL properties = AbstractDSpaceTest.class.getClassLoader() .getResource("test-config.properties"); testProps.load(properties.openStream()); diff --git a/dspace-api/src/test/java/org/dspace/AbstractIntegrationTest.java b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTest.java index d437a773852a..725f9d2783e4 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractIntegrationTest.java +++ b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTest.java @@ -12,6 +12,7 @@ import java.io.IOException; import 
java.io.Writer; import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; @@ -118,7 +119,8 @@ private String getLocalConfigurationFilePath() { */ protected void appendToLocalConfiguration(String textToAppend) { String extraConfPath = getLocalConfigurationFilePath(); - try (Writer output = new BufferedWriter(new FileWriter(extraConfPath, true))) { + try (Writer output = new BufferedWriter( + new FileWriter(extraConfPath, StandardCharsets.UTF_8, true))) { output.append("\n"); output.append(textToAppend); output.flush(); diff --git a/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java index 2b6a73673ae7..6884b949a66a 100644 --- a/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java +++ b/dspace-api/src/test/java/org/dspace/AbstractIntegrationTestWithDatabase.java @@ -15,6 +15,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authority.AuthoritySearchService; import org.dspace.authority.MockAuthoritySolrServiceImpl; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.AbstractBuilder; @@ -28,11 +29,14 @@ import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.kernel.ServiceManager; +import org.dspace.qaevent.MockQAEventService; +import org.dspace.qaevent.service.QAEventService; import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.statistics.MockSolrLoggerServiceImpl; import org.dspace.statistics.MockSolrStatisticsCore; +import org.dspace.statistics.SolrStatisticsCore; import org.dspace.storage.rdbms.DatabaseUtils; -import org.jdom.Document; +import org.jdom2.Document; import org.junit.After; import org.junit.Before; import 
org.junit.BeforeClass; @@ -105,7 +109,7 @@ public static void initDatabase() { public void setUp() throws Exception { try { //Start a new context - context = new Context(Context.Mode.BATCH_EDIT); + context = new Context(Context.Mode.READ_WRITE); context.turnOffAuthorisationSystem(); //Find our global test EPerson account. If it doesn't exist, create it. @@ -183,17 +187,21 @@ public void destroy() throws Exception { searchService.reset(); // Clear the statistics core. serviceManager - .getServiceByName(null, MockSolrStatisticsCore.class) + .getServiceByName(SolrStatisticsCore.class.getName(), MockSolrStatisticsCore.class) .reset(); MockSolrLoggerServiceImpl statisticsService = serviceManager - .getServiceByName(null, MockSolrLoggerServiceImpl.class); + .getServiceByName("solrLoggerService", MockSolrLoggerServiceImpl.class); statisticsService.reset(); MockAuthoritySolrServiceImpl authorityService = serviceManager - .getServiceByName(null, MockAuthoritySolrServiceImpl.class); + .getServiceByName(AuthoritySearchService.class.getName(), MockAuthoritySolrServiceImpl.class); authorityService.reset(); + MockQAEventService qaEventService = serviceManager + .getServiceByName(QAEventService.class.getName(), MockQAEventService.class); + qaEventService.reset(); + // Reload our ConfigurationService (to reset configs to defaults again) DSpaceServicesFactory.getInstance().getConfigurationService().reloadConfig(); diff --git a/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java b/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java new file mode 100644 index 000000000000..87127f9cf8fd --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package 
org.dspace.access.status; + +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.fail; + +import java.sql.SQLException; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit Tests for access status service + */ +public class AccessStatusServiceTest extends AbstractUnitTest { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AccessStatusServiceTest.class); + + private Collection collection; + private Community owningCommunity; + private Item item; + + protected CommunityService communityService = + ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + protected AccessStatusService accessStatusService = + AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + /** + * This method will be run before every test as per @Before. 
It will + * initialize resources required for the tests. + * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + item = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. + * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + try { + itemService.delete(context, item); + } catch (Exception e) { + // ignore + } + try { + collectionService.delete(context, collection); + } catch (Exception e) { + // ignore + } + try { + communityService.delete(context, owningCommunity); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + item = null; + collection = null; + owningCommunity = null; + try { + super.destroy(); + } catch (Exception e) { + // ignore + } + } + + @Test + public void testGetAccessStatus() throws Exception { + String status = accessStatusService.getAccessStatus(context, item); + assertNotEquals("testGetAccessStatus 0", status, DefaultAccessStatusHelper.UNKNOWN); + } +} diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java 
b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java new file mode 100644 index 000000000000..51291ee9850d --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -0,0 +1,445 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.time.LocalDate; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import 
org.dspace.eperson.service.GroupService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class DefaultAccessStatusHelperTest extends AbstractUnitTest { + + private static final Logger log = LogManager.getLogger(DefaultAccessStatusHelperTest.class); + + private Collection collection; + private Community owningCommunity; + private Item itemWithoutBundle; + private Item itemWithoutBitstream; + private Item itemWithBitstream; + private Item itemWithEmbargo; + private Item itemWithDateRestriction; + private Item itemWithGroupRestriction; + private Item itemWithoutPolicy; + private Item itemWithoutPrimaryBitstream; + private Item itemWithPrimaryAndMultipleBitstreams; + private Item itemWithoutPrimaryAndMultipleBitstreams; + private DefaultAccessStatusHelper helper; + private Date threshold; + + protected CommunityService communityService = + ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + protected BundleService bundleService = + ContentServiceFactory.getInstance().getBundleService(); + protected BitstreamService bitstreamService = + ContentServiceFactory.getInstance().getBitstreamService(); + protected ResourcePolicyService resourcePolicyService = + AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + protected GroupService groupService = + EPersonServiceFactory.getInstance().getGroupService(); + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. 
+ * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + itemWithoutBundle = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithEmbargo = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithDateRestriction = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithGroupRestriction = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPolicy = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPrimaryBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithPrimaryAndMultipleBitstreams = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPrimaryAndMultipleBitstreams = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + helper = new DefaultAccessStatusHelper(); + threshold = 
dateFrom(10000, 1, 1); + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. + * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + try { + itemService.delete(context, itemWithoutBundle); + itemService.delete(context, itemWithoutBitstream); + itemService.delete(context, itemWithBitstream); + itemService.delete(context, itemWithEmbargo); + itemService.delete(context, itemWithDateRestriction); + itemService.delete(context, itemWithGroupRestriction); + itemService.delete(context, itemWithoutPolicy); + itemService.delete(context, itemWithoutPrimaryBitstream); + itemService.delete(context, itemWithPrimaryAndMultipleBitstreams); + itemService.delete(context, itemWithoutPrimaryAndMultipleBitstreams); + } catch (Exception e) { + // ignore + } + try { + collectionService.delete(context, collection); + } catch (Exception e) { + // ignore + } + try { + communityService.delete(context, owningCommunity); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + itemWithoutBundle = null; + itemWithoutBitstream = null; + itemWithBitstream = null; + itemWithEmbargo = null; + itemWithDateRestriction = null; + itemWithGroupRestriction = null; + itemWithoutPolicy = null; + itemWithoutPrimaryBitstream = null; + itemWithPrimaryAndMultipleBitstreams = null; + itemWithoutPrimaryAndMultipleBitstreams = null; + collection = null; + owningCommunity = null; + helper = null; + threshold = null; + communityService = null; + collectionService = null; + itemService = null; + workspaceItemService = null; + installItemService = null; + bundleService = null; + bitstreamService = null; + resourcePolicyService = null; + groupService = null; + try { + super.destroy(); + } catch (Exception e) { + // ignore + } + } + + /** + * Test for a null item + * 
@throws java.lang.Exception passed through. + */ + @Test + public void testWithNullItem() throws Exception { + String status = helper.getAccessStatusFromItem(context, null, threshold); + assertThat("testWithNullItem 0", status, equalTo(DefaultAccessStatusHelper.UNKNOWN)); + } + + /** + * Test for an item with no bundle + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutBundle() throws Exception { + String status = helper.getAccessStatusFromItem(context, itemWithoutBundle, threshold); + assertThat("testWithoutBundle 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY)); + } + + /** + * Test for an item with no bitstream + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + bundleService.create(context, itemWithoutBitstream, Constants.CONTENT_BUNDLE_NAME); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutBitstream, threshold); + assertThat("testWithoutBitstream 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY)); + } + + /** + * Test for an item with a basic bitstream (open access) + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithBitstream, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithBitstream, threshold); + assertThat("testWithBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } + + /** + * Test for an item with an embargo + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithEmbargo() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithEmbargo, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(dateFrom(9999, 12, 31)); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithEmbargo 1", embargoDate, equalTo(policy.getStartDate().toString())); + } + + /** + * Test for an item with an anonymous date restriction + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithDateRestriction() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithDateRestriction, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Restriction"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(dateFrom(10000, 1, 1)); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithDateRestriction, threshold); + assertThat("testWithDateRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with a group restriction + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithGroupRestriction() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithGroupRestriction, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Restriction"); + Group group = groupService.findByName(context, Group.ADMIN); + policy.setGroup(group); + policy.setAction(Constants.READ); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithGroupRestriction, threshold); + assertThat("testWithGroupRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with no policy + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutPolicy() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPolicy, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + authorizeService.removeAllPolicies(context, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPolicy, threshold); + assertThat("testWithoutPolicy 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with no primary bitstream + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithoutPrimaryBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPrimaryBitstream, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "first"); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryBitstream, threshold); + assertThat("testWithoutPrimaryBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } + + /** + * Test for an item with an open access bitstream + * and another primary bitstream on embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithPrimaryAndMultipleBitstreams() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithPrimaryAndMultipleBitstreams, + Constants.CONTENT_BUNDLE_NAME); + bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + Bitstream primaryBitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bundle.setPrimaryBitstreamID(primaryBitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(dateFrom(9999, 12, 31)); + policies.add(policy); + authorizeService.removeAllPolicies(context, primaryBitstream); + authorizeService.addPolicies(context, policies, primaryBitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); + assertThat("testWithPrimaryAndMultipleBitstreams 0", 
status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); + assertThat("testWithPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(policy.getStartDate().toString())); + } + + /** + * Test for an item with an open access bitstream + * and another bitstream on embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithNoPrimaryAndMultipleBitstreams() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPrimaryAndMultipleBitstreams, + Constants.CONTENT_BUNDLE_NAME); + bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + Bitstream anotherBitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(dateFrom(9999, 12, 31)); + policies.add(policy); + authorizeService.removeAllPolicies(context, anotherBitstream); + authorizeService.addPolicies(context, policies, anotherBitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); + assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + String embargoDate = helper.getEmbargoFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithNoPrimaryAndMultipleBitstreams 1", embargoDate, equalTo(null)); + } + + /** + * Create a Date from local year, month, day. + * + * @param year the year. + * @param month the month. + * @param day the day. + * @return the assembled date. 
+ */ + private Date dateFrom(int year, int month, int day) { + return Date.from(LocalDate.of(year, month, day) + .atStartOfDay() + .atZone(ZoneId.systemDefault()) + .toInstant()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java b/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java new file mode 100644 index 000000000000..4676236cfee4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/administer/ProcessCleanerIT.java @@ -0,0 +1,380 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.administer; + +import static org.apache.commons.lang.time.DateUtils.addDays; +import static org.dspace.content.ProcessStatus.COMPLETED; +import static org.dspace.content.ProcessStatus.FAILED; +import static org.dspace.content.ProcessStatus.RUNNING; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +import java.sql.SQLException; +import java.util.Date; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.ProcessBuilder; +import org.dspace.content.ProcessStatus; +import org.dspace.scripts.Process; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ProcessService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Test; + +/** + * Integration tests for {@link ProcessCleaner}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ProcessCleanerIT extends AbstractIntegrationTestWithDatabase { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private ProcessService processService = ScriptServiceFactory.getInstance().getProcessService(); + + @Test + public void testWithoutProcessToDelete() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]")); + assertThat(messages, hasItem("Found 0 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + + } + + @Test + public void testWithoutSpecifiedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = 
buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + + } + + @Test + public void testWithCompletedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, 
addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-c" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + + } + + @Test + public void testWithRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), 
-7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-r" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [RUNNING]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), notNullValue()); + assertThat(processService.find(context, process_5.getID()), notNullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithFailedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process 
process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(FAILED, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-f" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [FAILED]")); + assertThat(messages, hasItem("Found 2 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), notNullValue()); + assertThat(processService.find(context, process_5.getID()), notNullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithCompletedAndFailedStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process 
process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(FAILED, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-c", "-f" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED]")); + assertThat(messages, hasItem("Found 4 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), notNullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithCompletedAndRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + 
Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-c", "-r" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, RUNNING]")); + assertThat(messages, hasItem("Found 4 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), notNullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithFailedAndRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), 
-7)); + Process process_7 = buildProcess(FAILED, addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-f", "-r" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [FAILED, RUNNING]")); + assertThat(messages, hasItem("Found 3 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), notNullValue()); + assertThat(processService.find(context, process_5.getID()), notNullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + @Test + public void testWithCompletedFailedAndRunningStatus() throws Exception { + + Process process_1 = buildProcess(COMPLETED, addDays(new Date(), -2)); + Process process_2 = buildProcess(RUNNING, addDays(new Date(), -1)); + Process process_3 = buildProcess(FAILED, addDays(new Date(), -3)); + Process process_4 = buildProcess(COMPLETED, addDays(new Date(), -6)); + Process process_5 = buildProcess(COMPLETED, addDays(new Date(), -8)); + Process process_6 = buildProcess(RUNNING, addDays(new Date(), -7)); + Process process_7 = buildProcess(FAILED, 
addDays(new Date(), -8)); + Process process_8 = buildProcess(RUNNING, addDays(new Date(), -9)); + + configurationService.setProperty("process-cleaner.days", 5); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "process-cleaner", "-f", "-r", "-c" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + List messages = testDSpaceRunnableHandler.getInfoMessages(); + assertThat(messages, hasSize(3)); + assertThat(messages, hasItem("Searching for processes with status: [COMPLETED, FAILED, RUNNING]")); + assertThat(messages, hasItem("Found 5 processes to be deleted")); + assertThat(messages, hasItem("Process cleanup completed")); + + assertThat(processService.find(context, process_1.getID()), notNullValue()); + assertThat(processService.find(context, process_2.getID()), notNullValue()); + assertThat(processService.find(context, process_3.getID()), notNullValue()); + assertThat(processService.find(context, process_4.getID()), nullValue()); + assertThat(processService.find(context, process_5.getID()), nullValue()); + assertThat(processService.find(context, process_6.getID()), nullValue()); + assertThat(processService.find(context, process_7.getID()), nullValue()); + assertThat(processService.find(context, process_8.getID()), nullValue()); + + } + + private Process buildProcess(ProcessStatus processStatus, Date creationTime) throws SQLException { + return ProcessBuilder.createProcess(context, admin, "test", List.of()) + .withProcessStatus(processStatus) + .withCreationTime(creationTime) + .build(); + } +} diff --git a/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java b/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java index 7abe3618ed5a..63340698ac00 100644 --- a/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java +++ 
b/dspace-api/src/test/java/org/dspace/administer/StructBuilderIT.java @@ -8,6 +8,7 @@ package org.dspace.administer; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -18,9 +19,10 @@ import java.util.Iterator; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.Source; -import javax.xml.transform.TransformerException; import javax.xml.transform.stream.StreamSource; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.AbstractIntegrationTest; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; @@ -29,13 +31,11 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; -import org.junit.After; +import org.dspace.handle.Handle; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.w3c.dom.Attr; import org.w3c.dom.Node; import org.xml.sax.SAXException; @@ -53,7 +53,7 @@ */ public class StructBuilderIT extends AbstractIntegrationTest { - private static final Logger log = LoggerFactory.getLogger(StructBuilderIT.class); + private static final Logger log = LogManager.getLogger(); private static final CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); @@ -89,27 +89,28 @@ public void setUp() throws SQLException, AuthorizeException, IOException { context.restoreAuthSystemState(); } - @After - public void tearDown() { - } + private static final String COMMUNITY_0_HANDLE = "https://hdl.handle.net/1/1"; + private static final String COMMUNITY_0_0_HANDLE = "https://hdl.handle.net/1/1.1"; + private static final String COLLECTION_0_0_0_HANDLE = "https://hdl.handle.net/1/1.1.1"; + private 
static final String COLLECTION_0_1_HANDLE = "https://hdl.handle.net/1/1.2"; /** Test structure document. */ private static final String IMPORT_DOCUMENT = "\n" + "\n" + - " \n" + + " \n" + " Top Community 0\n" + " A top level community\n" + " Testing 1 2 3\n" + " 1969\n" + " A sidebar\n" + - " \n" + + " \n" + " Sub Community 0.0\n" + " A sub community\n" + " Live from New York....\n" + " 1957\n" + " Another sidebar\n" + - " \n" + + " \n" + " Collection 0.0.0\n" + " A collection\n" + " Our next guest needs no introduction\n" + @@ -119,7 +120,14 @@ public void tearDown() { " Testing\n" + " \n" + " \n" + - " \n" + + " \n" + + " Sub Community 0.1\n" + + " A sub community with no handle\n" + + " Stop me if you've heard this one\n" + + " 2525\n" + + " One more sidebar\n" + + " \n" + + " \n" + " Collection 0.1\n" + " Another collection\n" + " Fourscore and seven years ago\n" + @@ -150,7 +158,56 @@ public void tearDown() { * @throws java.lang.Exception passed through. */ @Test - public void testImportStructure() + public void testImportStructureWithoutHandles() + throws Exception { + System.out.println("importStructure"); + + // Run the method under test and collect its output. + ByteArrayOutputStream outputDocument + = new ByteArrayOutputStream(IMPORT_DOCUMENT.length() * 2 * 2); + byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8); + context.turnOffAuthorisationSystem(); + try (InputStream input = new ByteArrayInputStream(inputBytes);) { + StructBuilder.importStructure(context, input, outputDocument, false); + } finally { + context.restoreAuthSystemState(); + } + + // Compare import's output with its input. + // N.B. here we rely on StructBuilder to emit communities and + // collections in the same order as the input document. If that changes, + // we will need a smarter NodeMatcher, probably based on children. 
+ Source output = new StreamSource( + new ByteArrayInputStream(outputDocument.toByteArray())); + Source reference = new StreamSource( + new ByteArrayInputStream( + IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); + Diff myDiff = DiffBuilder.compare(reference).withTest(output) + .normalizeWhitespace() + .withAttributeFilter((Attr attr) -> + !attr.getName().equals("identifier")) + .checkForIdentical() + .build(); + + // Was there a difference? + // Always output differences -- one is expected. + ComparisonFormatter formatter = new DefaultComparisonFormatter(); + for (Difference difference : myDiff.getDifferences()) { + System.err.println(difference.toString(formatter)); + } + // Test for *significant* differences. + assertFalse("Output does not match input.", isDifferent(myDiff)); + + // TODO spot-check some objects. + } + + /** + * Test of importStructure method, with given Handles. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testImportStructureWithHandles() throws Exception { System.out.println("importStructure"); @@ -160,15 +217,37 @@ public void testImportStructure() byte[] inputBytes = IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8); context.turnOffAuthorisationSystem(); try (InputStream input = new ByteArrayInputStream(inputBytes);) { - StructBuilder.importStructure(context, input, outputDocument); - } catch (IOException | SQLException - | ParserConfigurationException | TransformerException ex) { - System.err.println(ex.getMessage()); - System.exit(1); + StructBuilder.importStructure(context, input, outputDocument, true); } finally { context.restoreAuthSystemState(); } + boolean found; + + // Check a chosen Community for the right Handle. 
+ found = false; + for (Community community : communityService.findAllTop(context)) { + for (Handle handle : community.getHandles()) { + if (handle.getHandle().equals(COMMUNITY_0_HANDLE)) { + found = true; + break; + } + } + } + assertTrue("A community should have its specified handle", found); + + // Check a chosen Collection for the right Handle. + found = false; + for (Collection collection : collectionService.findAll(context)) { + for (Handle handle : collection.getHandles()) { + if (handle.getHandle().equals(COLLECTION_0_1_HANDLE)) { + found = true; + break; + } + } + } + assertTrue("A collection should have its specified handle", found); + // Compare import's output with its input. // N.B. here we rely on StructBuilder to emit communities and // collections in the same order as the input document. If that changes, @@ -180,7 +259,6 @@ public void testImportStructure() IMPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); Diff myDiff = DiffBuilder.compare(reference).withTest(output) .normalizeWhitespace() -// .withNodeFilter(new MyNodeFilter()) .withAttributeFilter((Attr attr) -> !attr.getName().equals("identifier")) .checkForIdentical() @@ -236,7 +314,6 @@ public void testExportStructure() EXPORT_DOCUMENT.getBytes(StandardCharsets.UTF_8))); Diff myDiff = DiffBuilder.compare(reference).withTest(output) .normalizeWhitespace() -// .withNodeFilter(new MyNodeFilter()) .withAttributeFilter((Attr attr) -> !attr.getName().equals("identifier")) .checkForIdentical() @@ -310,23 +387,4 @@ private boolean isDifferent(Diff diff) { // There must be at most one difference. return diffIterator.hasNext(); } - - /** - * Reject uninteresting nodes. 
(currently commented out of tests above) - */ - /*private static class MyNodeFilter implements Predicate { - private static final List dontCare = Arrays.asList( - "description", - "intro", - "copyright", - "sidebar", - "license", - "provenance"); - - @Override - public boolean test(Node node) { - String type = node.getLocalName(); - return ! dontCare.contains(type); - } - }*/ } diff --git a/dspace-api/src/test/java/org/dspace/alerts/SystemWideAlertServiceTest.java b/dspace-api/src/test/java/org/dspace/alerts/SystemWideAlertServiceTest.java new file mode 100644 index 000000000000..5d8d6ac594a6 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/alerts/SystemWideAlertServiceTest.java @@ -0,0 +1,202 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.alerts; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.when; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.Logger; +import org.dspace.alerts.dao.SystemWideAlertDAO; +import org.dspace.alerts.service.SystemWideAlertService; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class SystemWideAlertServiceTest { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SystemWideAlertService.class); + + @InjectMocks + private 
SystemWideAlertServiceImpl systemWideAlertService; + + @Mock + private SystemWideAlertDAO systemWideAlertDAO; + + @Mock + private AuthorizeService authorizeService; + + @Mock + private Context context; + + @Mock + private SystemWideAlert systemWideAlert; + + @Mock + private EPerson eperson; + + + @Test + public void testCreate() throws Exception { + // Mock admin state + when(authorizeService.isAdmin(context)).thenReturn(true); + + // Declare objects utilized in unit test + SystemWideAlert systemWideAlert = new SystemWideAlert(); + systemWideAlert.setMessage("Test message"); + systemWideAlert.setAllowSessions(AllowSessionsEnum.ALLOW_ALL_SESSIONS); + systemWideAlert.setCountdownTo(null); + systemWideAlert.setActive(true); + + // Mock DAO to return our defined SystemWideAlert + when(systemWideAlertDAO.create(any(), any())).thenReturn(systemWideAlert); + + // The newly created SystemWideAlert's message should match our mocked SystemWideAlert's message + SystemWideAlert result = systemWideAlertService.create(context, "Test message", + AllowSessionsEnum.ALLOW_ALL_SESSIONS, null, true); + assertEquals("TestCreate 0", systemWideAlert.getMessage(), result.getMessage()); + // The newly created SystemWideAlert should match our mocked SystemWideAlert + assertEquals("TestCreate 1", systemWideAlert, result); + } + + + @Test + public void testFindAll() throws Exception { + // Declare objects utilized in unit test + List systemWideAlertList = new ArrayList<>(); + + // The SystemWideAlert(s) reported from our mocked state should match our systemWideAlertList + assertEquals("TestFindAll 0", systemWideAlertList, systemWideAlertService.findAll(context)); + } + + @Test + public void testFind() throws Exception { + // Mock DAO to return our mocked SystemWideAlert + when(systemWideAlertService.find(context, 0)).thenReturn(systemWideAlert); + + // The SystemWideAlert reported from our ID should match our mocked SystemWideAlert + assertEquals("TestFind 0", systemWideAlert, 
systemWideAlertService.find(context, 0)); + } + + @Test + public void testFindAllActive() throws Exception { + // Declare objects utilized in unit test + List systemWideAlertList = new ArrayList<>(); + + // The SystemWideAlert(s) reported from our mocked state should match our systemWideAlertList + assertEquals("TestFindAllActive 0", systemWideAlertList, systemWideAlertService.findAllActive(context, 10, 0)); + } + + + @Test + public void testUpdate() throws Exception { + // Mock admin state + when(authorizeService.isAdmin(context)).thenReturn(true); + + // Invoke impl of method update() + systemWideAlertService.update(context, systemWideAlert); + + // Verify systemWideAlertDAO.save was invoked twice to confirm proper invocation of both impls of update() + Mockito.verify(systemWideAlertDAO, times(1)).save(context, systemWideAlert); + } + + @Test + public void testDelete() throws Exception { + // Mock admin state + when(authorizeService.isAdmin(context)).thenReturn(true); + + // Invoke method delete() + systemWideAlertService.delete(context, systemWideAlert); + + // Verify systemWideAlertDAO.delete() ran once to confirm proper invocation of delete() + Mockito.verify(systemWideAlertDAO, times(1)).delete(context, systemWideAlert); + } + + @Test + public void canNonAdminUserLoginTrueTest() throws Exception { + // Mock the alert state + when(systemWideAlert.getAllowSessions()).thenReturn(AllowSessionsEnum.ALLOW_ALL_SESSIONS); + + // Mock DAO to return our defined systemWideAlertList + List systemWideAlertList = new ArrayList<>(); + systemWideAlertList.add(systemWideAlert); + when(systemWideAlertDAO.findAllActive(context, 1, 0)).thenReturn(systemWideAlertList); + + // Assert the non admin users can log in + assertTrue("CanNonAdminUserLogin 0", systemWideAlertService.canNonAdminUserLogin(context)); + } + + @Test + public void canNonAdminUserLoginFalseTest() throws Exception { + // Mock the alert state + 
when(systemWideAlert.getAllowSessions()).thenReturn(AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY); + + // Mock DAO to return our defined systemWideAlertList + List systemWideAlertList = new ArrayList<>(); + systemWideAlertList.add(systemWideAlert); + when(systemWideAlertDAO.findAllActive(context, 1, 0)).thenReturn(systemWideAlertList); + + // Assert the non admin users can log in + assertFalse("CanNonAdminUserLogin 1", systemWideAlertService.canNonAdminUserLogin(context)); + } + + @Test + public void canUserMaintainSessionAdminTest() throws Exception { + // Assert the admin user can log in + assertTrue("CanUserMaintainSession 0", systemWideAlertService.canNonAdminUserLogin(context)); + } + @Test + public void canUserMaintainSessionTrueTest() throws Exception { + // Mock admin state + when(authorizeService.isAdmin(context, eperson)).thenReturn(false); + + // Mock the alert state + when(systemWideAlert.getAllowSessions()).thenReturn(AllowSessionsEnum.ALLOW_CURRENT_SESSIONS_ONLY); + + // Mock DAO to return our defined systemWideAlertList + List systemWideAlertList = new ArrayList<>(); + systemWideAlertList.add(systemWideAlert); + when(systemWideAlertDAO.findAllActive(context, 1, 0)).thenReturn(systemWideAlertList); + + // Assert the non admin users can main session + assertTrue("CanUserMaintainSession 1", systemWideAlertService.canUserMaintainSession(context, eperson)); + } + + @Test + public void canUserMaintainSessionFalseTest() throws Exception { + // Mock admin state + when(authorizeService.isAdmin(context, eperson)).thenReturn(false); + + // Mock the alert state + when(systemWideAlert.getAllowSessions()).thenReturn(AllowSessionsEnum.ALLOW_ADMIN_SESSIONS_ONLY); + + // Mock DAO to return our defined systemWideAlertList + List systemWideAlertList = new ArrayList<>(); + systemWideAlertList.add(systemWideAlert); + when(systemWideAlertDAO.findAllActive(context, 1, 0)).thenReturn(systemWideAlertList); + + // Assert the non admin users cannot main session + 
assertFalse("CanUserMaintainSession 2", systemWideAlertService.canUserMaintainSession(context, eperson)); + } + + + +} diff --git a/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java new file mode 100644 index 000000000000..73f02e40494c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkaccesscontrol/BulkAccessControlIT.java @@ -0,0 +1,1860 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.bulkaccesscontrol; + +import static org.dspace.app.matcher.ResourcePolicyMatcher.matches; +import static org.dspace.authorize.ResourcePolicy.TYPE_CUSTOM; +import static org.dspace.authorize.ResourcePolicy.TYPE_INHERITED; +import static org.dspace.core.Constants.CONTENT_BUNDLE_NAME; +import static org.dspace.core.Constants.DEFAULT_BUNDLE_NAME; +import static org.dspace.core.Constants.READ; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; 
+import org.apache.commons.io.file.PathUtils; +import org.apache.commons.lang3.ArrayUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.mediafilter.FormatFilter; +import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory; +import org.dspace.app.mediafilter.service.MediaFilterService; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.BundleBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.core.SelfNamedPlugin; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.SearchUtils; +import org.dspace.discovery.indexobject.IndexableItem; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for the Bulk Access conditions Feature{@link BulkAccessControl}. 
+ * + * @author Mohamed Eskander (mohamed.eskander at 4science.it) + */ +public class BulkAccessControlIT extends AbstractIntegrationTestWithDatabase { + + //key (in dspace.cfg) which lists all enabled filters by name + private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; + + //prefix (in dspace.cfg) for all filter properties + private static final String FILTER_PREFIX = "filter"; + + //suffix (in dspace.cfg) for input formats supported by each filter + private static final String INPUT_FORMATS_SUFFIX = "inputFormats"; + + private Path tempDir; + private String tempFilePath; + + private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + private SearchService searchService = SearchUtils.getSearchService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + @Before + @Override + public void setUp() throws Exception { + + super.setUp(); + + tempDir = Files.createTempDirectory("bulkAccessTest"); + tempFilePath = tempDir + "/bulk-access.json"; + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + super.destroy(); + } + + @Test + public void performBulkAccessWithAnonymousEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath}; + + 
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("An eperson to do the the Bulk Access Control must be specified") + )); + } + + @Test + public void performBulkAccessWithNotExistingEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + String randomUUID = UUID.randomUUID().toString(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", randomUUID}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("EPerson cannot be found: " + randomUUID) + )); + } + + @Test + public void performBulkAccessWithNotAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = 
CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithCommunityAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", community.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + 
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithCollectionAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", collection.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithItemAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).withAdminUser(eperson).build(); + + 
context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(0)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + @Test + public void performBulkAccessWithNotCollectionAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + // add eperson to admin group + Collection collectionOne = CollectionBuilder.createCollection(context, community) + .withName("collection") + .withAdminGroup(eperson) + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + ItemBuilder.createItem(context, collectionOne).build(); + ItemBuilder.createItem(context, collectionTwo).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", collectionOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), 
testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithNotCommunityAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // add eperson to admin group + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community") + .withAdminGroup(eperson) + .build(); + + Community communityTwo = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-u", communityTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithNotItemAdminEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = 
CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + // add eperson to admin group + Item itemOne = ItemBuilder.createItem(context, collection) + .withAdminUser(eperson) + .build(); + + Item itemTwo = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", + "-u", itemOne.getID().toString(), + "-u", itemTwo.getID().toString(), + "-f", tempFilePath, + "-e", eperson.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Current user is not eligible to execute script bulk-access-control") + )); + } + + @Test + public void performBulkAccessWithoutRequiredParamTest() throws Exception { + + buildJsonFile(""); + + String[] args = new String[] {"bulk-access-control", "-f", tempFilePath, "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("A target uuid must be provided with at least on uuid") + )); + } + + @Test + public void 
performBulkAccessWithEmptyJsonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).withTitle("title").build(); + + context.restoreAuthSystemState(); + + buildJsonFile(""); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("Error parsing json file") + )); + } + + @Test + public void performBulkAccessWithWrongModeOfItemValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"wrong\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + 
ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong value for item mode") + )); + } + + @Test + public void performBulkAccessWithMissingModeOfItemValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("item mode node must be provided") + )); + } + + @Test + public void performBulkAccessWithWrongModeOfBitstreamValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + 
.build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"bitstream\": {\n" + + " \"mode\": \"wrong\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong value for bitstream mode") + )); + } + + @Test + public void performBulkAccessWithMissingModeOfBitstreamValueTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"bitstream\": {\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), 
hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("bitstream mode node must be provided") + )); + } + + @Test + public void performBulkAccessWithNotFoundAccessConditionNameTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"wrongAccess\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("wrong access condition ") + )); + } + + @Test + public void performBulkAccessWithInvalidEmbargoAccessConditionDateTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String jsonOne = 
"{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"endDate\": \"2024-06-24T00:00:00Z\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("invalid access condition, The access condition embargo requires a start date.") + )); + } + + @Test + public void performBulkAccessWithInvalidLeaseAccessConditionDateTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String json = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"startDate\": \"2024-06-24T00:00:00Z\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(json); + + String[] args = new String[] {"bulk-access-control", "-u", item.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + 
assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("invalid access condition, The access condition lease requires an end date.") + )); + } + + @Test + public void performBulkAccessForCommunityItemsWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + UUID.randomUUID() + "\"]\n" + + " },\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("constraint is not supported when uuid isn't an Item") + )); + } + + @Test + public void performBulkAccessForMultipleItemsWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community communityOne = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + Community communityTwo = CommunityBuilder.createCommunity(context) + .withName("community two") + .build(); + + 
context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + UUID.randomUUID() + "\"]\n" + + " },\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", communityOne.getID().toString(), + "-u", communityTwo.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("constraint isn't supported when multiple uuids are provided") + )); + } + + @Test + public void performBulkAccessForSingleItemWithBitstreamConstraintsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("community one") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamOneContent = "Dummy content one"; + Bitstream bitstreamOne; + try (InputStream is = IOUtils.toInputStream(bitstreamOneContent, CharEncoding.UTF_8)) { + bitstreamOne = BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream one") + .build(); + } + + String bitstreamTwoContent = "Dummy content of bitstream two"; + Bitstream 
bitstreamTwo; + try (InputStream is = IOUtils.toInputStream(bitstreamTwoContent, CharEncoding.UTF_8)) { + bitstreamTwo = BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream two") + .build(); + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": [\"" + bitstreamOne.getID().toString() + "\"]\n" + + " },\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", + "-u", item.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(1)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Bitstream {" + bitstreamOne.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"))); + + bitstreamOne = context.reloadEntity(bitstreamOne); + bitstreamTwo = context.reloadEntity(bitstreamTwo); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(bitstreamOne.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(bitstreamTwo.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + 
} + + @Test + public void performBulkAccessWithAddModeAndEmptyAccessConditionsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\"\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = + new String[] {"bulk-access-control", "-u", parentCommunity.getID().toString(), "-f", tempFilePath, + "-e", admin.getEmail()}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasSize(1)); + assertThat(testDSpaceRunnableHandler.getErrorMessages(), hasItem( + containsString("accessConditions of item must be provided with mode") + )); + } + + @Test + public void performBulkAccessWithValidJsonTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community two") + .build(); + + Community subCommunityThree = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community three") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) + .withName("collection two") + .build(); + + 
Collection collectionThree = CollectionBuilder.createCollection(context, subCommunityThree) + .withName("collection three") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collectionOne).build(); + + Item itemTwo = ItemBuilder.createItem(context, collectionTwo).build(); + + Item itemThree = ItemBuilder.createItem(context, collectionThree).withTitle("item three title").build(); + + Item itemFour = ItemBuilder.createItem(context, collectionThree).withTitle("item four title").build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-u", itemThree.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(3)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Item {" + itemOne.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"), + containsString("Replacing Item {" + itemTwo.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}"), + containsString("Replacing Item {" + itemThree.getID() + + "} policy to access conditions:{embargo, start_date=2024-06-24}") + )); + + itemOne = context.reloadEntity(itemOne); + itemTwo = context.reloadEntity(itemTwo); + 
itemThree = context.reloadEntity(itemThree); + itemFour = context.reloadEntity(itemFour); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(itemOne.getResourcePolicies(), hasSize(1)); + assertThat(itemOne.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(itemTwo.getResourcePolicies(), hasSize(1)); + assertThat(itemTwo.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(itemThree.getResourcePolicies(), hasSize(1)); + assertThat(itemThree.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(itemFour.getResourcePolicies().size(), is(1)); + assertThat(itemFour.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + + + + } + + @Test + public void performBulkAccessWithReplaceModeAndEmptyAccessConditionsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Community subCommunityTwo = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community two") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Collection collectionTwo = CollectionBuilder.createCollection(context, subCommunityTwo) + .withName("collection two") + .build(); + + for (int i = 0; i < 20 ; i++) { + ItemBuilder.createItem(context, collectionOne).build(); + } + + 
for (int i = 0; i < 5 ; i++) { + Item item = ItemBuilder.createItem(context, collectionTwo).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\"\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\"\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-u", collectionTwo.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(60)); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + List itemsOfSubCommTwo = findItems("location.comm:" + subCommunityTwo.getID()); + + assertThat(itemsOfSubCommOne, hasSize(10)); + assertThat(itemsOfSubCommTwo, hasSize(5)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(0L)); + + assertThat(itemsOfSubCommTwo.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(5L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(1)); + assertThat(item.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + } + 
+ for (Item item : itemsOfSubCommTwo) { + assertThat(item.getResourcePolicies(), hasSize(1)); + assertThat(item.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItems( + containsString("Cleaning Item {" + item.getID() + "} policies"), + containsString("Inheriting policies from owning Collection in Item {" + item.getID() + "") + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(1)); + assertThat(bitstream.getResourcePolicies(), hasItem( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItems( + containsString("Cleaning Bitstream {" + bitstream.getID() + "} policies"), + containsString("Inheriting policies from owning Collection in Bitstream {" + bitstream.getID() + "") + )); + } + } + } + + @Test + public void performBulkAccessWithAddModeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + for (int i = 0; i < 5 ; i++) { + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + 
.withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"add\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(10)); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + + assertThat(itemsOfSubCommOne, hasSize(5)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(5L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(3)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), 
hasSize(3)); + assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, ResourcePolicy.TYPE_INHERITED), + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + } + } + } + + @Test + public void performBulkAccessWithReplaceModeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + for (int i = 0; i < 3 ; i++) { + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bistream") + .build(); + } + } + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { 
+ "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(6)); + + List itemsOfSubCommOne = findItems("location.comm:" + subCommunityOne.getID()); + + assertThat(itemsOfSubCommOne, hasSize(3)); + + assertThat(itemsOfSubCommOne.stream() + .flatMap(item -> findAllBitstreams(item).stream()) + .count(), is(3L)); + + for (Item item : itemsOfSubCommOne) { + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Item {" + item.getID() + + "} policy to access conditions:{openaccess, embargo, start_date=2024-06-24}") + )); + + List bitstreams = findAllBitstreams(item); + + for (Bitstream bitstream : bitstreams) { + assertThat(bitstream.getResourcePolicies(), hasSize(2)); + assertThat(bitstream.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasItem( + containsString("Replacing Bitstream {" + bitstream.getID() + + "} policy to access conditions:{openaccess, lease, end_date=2023-06-24}") + )); + } + } + } + + @Test + public void performBulkAccessAndCheckDerivativeBitstreamsPoliciesTest() throws 
Exception { + context.turnOffAuthorisationSystem(); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunityOne = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collectionOne = CollectionBuilder.createCollection(context, subCommunityOne) + .withName("collection one") + .build(); + + Item item = ItemBuilder.createItem(context, collectionOne).build(); + + Bundle bundle = BundleBuilder.createBundle(context, item) + .withName("ORIGINAL") + .build(); + + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + BitstreamBuilder.createBitstream(context, bundle, is) + .withName("bitstream") + .withFormat("TEXT") + .withMimeType("text/plain") + .build(); + } + + List formatFilters = new ArrayList<>(); + Map> filterFormats = new HashMap<>(); + MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService(); + + String[] filterNames = + DSpaceServicesFactory.getInstance() + .getConfigurationService() + .getArrayProperty(MEDIA_FILTER_PLUGINS_KEY); + + + for (int i = 0; i < filterNames.length; i++) { + + //get filter of this name & add to list of filters + FormatFilter filter = + (FormatFilter) CoreServiceFactory.getInstance() + .getPluginService() + .getNamedPlugin(FormatFilter.class, filterNames[i]); + formatFilters.add(filter); + + String filterClassName = filter.getClass().getName(); + + String pluginName = null; + + if (SelfNamedPlugin.class.isAssignableFrom(filter.getClass())) { + //Get the plugin instance name for this class + pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName(); + } + + String[] formats = + DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty( + FILTER_PREFIX + "." 
+ filterClassName + + (pluginName != null ? "." + pluginName : "") + + "." + INPUT_FORMATS_SUFFIX); + + //add to internal map of filters to supported formats + if (ArrayUtils.isNotEmpty(formats)) { + filterFormats.put(filterClassName + + (pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + + pluginName : ""), + Arrays.asList(formats)); + } + } + + mediaFilterService.setFilterClasses(formatFilters); + mediaFilterService.setFilterFormats(filterFormats); + + // here will create derivative bitstreams + mediaFilterService.applyFiltersItem(context, item); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"bitstream\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"openaccess\"\n" + + " },\n" + + " {\n" + + " \"name\": \"lease\",\n" + + " \"endDate\": \"2023-06-24\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunityOne.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + item = context.reloadEntity(item); + + Bundle originalBundle = item.getBundles(DEFAULT_BUNDLE_NAME).get(0); + Bundle textBundle = item.getBundles("TEXT").get(0); + + assertThat(item.getResourcePolicies(), hasSize(2)); + 
assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null) + )); + + assertThat(originalBundle.getBitstreams().get(0).getResourcePolicies(), hasSize(2)); + assertThat(originalBundle.getBitstreams().get(0).getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + + assertThat(textBundle.getBitstreams().get(0).getResourcePolicies(), hasSize(2)); + assertThat(textBundle.getBitstreams().get(0).getResourcePolicies(), containsInAnyOrder( + matches(READ, anonymousGroup, "openaccess", TYPE_CUSTOM), + matches(Constants.READ, anonymousGroup, "lease", TYPE_CUSTOM, null, "2023-06-24", null) + )); + } + + @Test + public void performBulkAccessWithReplaceModeAndAppendModeIsEnabledTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context).withName("special network").build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("collection one") + .withDefaultItemRead(group) + .build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + context.restoreAuthSystemState(); + + String jsonOne = "{ \"item\": {\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"embargo\",\n" + + " \"startDate\": \"2024-06-24\"\n" + + " }\n" + + " ]\n" + + " }}\n"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", item.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + try { + configurationService.setProperty("core.authorization.installitem.inheritance-read.append-mode", 
true); + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), + testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), containsInAnyOrder( + containsString("Replacing Item {" + item.getID() + "} policy to access conditions:" + + "{embargo, start_date=2024-06-24}"), + containsString("Inheriting policies from owning Collection in Item {" + item.getID() + "}") + )); + + item = context.reloadEntity(item); + + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + assertThat(item.getResourcePolicies(), hasSize(2)); + assertThat(item.getResourcePolicies(), containsInAnyOrder( + matches(Constants.READ, anonymousGroup, "embargo", TYPE_CUSTOM, "2024-06-24", null, null), + matches(Constants.READ, group, TYPE_INHERITED) + )); + } finally { + configurationService.setProperty("core.authorization.installitem.inheritance-read.append-mode", false); + } + } + + @Test + public void performBulkAccessWithReplaceModeOnItemsWithMultipleBundlesTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group adminGroup = groupService.findByName(context, Group.ADMIN); + + Community parentCommunity = CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Community subCommunity = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("sub community one") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, subCommunity) + .withName("collection one") + .build(); + + Item itemOne = ItemBuilder.createItem(context, collection).build(); + Item itemTwo = ItemBuilder.createItem(context, collection).build(); + 
ItemBuilder.createItem(context, collection).build(); + + Bundle bundleOne = BundleBuilder.createBundle(context, itemOne) + .withName("ORIGINAL") + .build(); + + Bundle bundleTwo = BundleBuilder.createBundle(context, itemTwo) + .withName("ORIGINAL") + .build(); + + BundleBuilder.createBundle(context, itemTwo) + .withName("ORIGINAL") + .build(); + + BundleBuilder.createBundle(context, itemOne) + .withName("TEXT") + .build(); + + Bitstream bitstreamOne; + Bitstream bitstreamTwo; + String bitstreamContent = "Dummy content"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstreamOne = + BitstreamBuilder.createBitstream(context, bundleOne, is) + .withName("bistream of bundle one") + .build(); + } + + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstreamTwo = + BitstreamBuilder.createBitstream(context, bundleTwo, is) + .withName("bitstream of bundle two") + .build(); + } + + context.restoreAuthSystemState(); + + String jsonOne = "{\n" + + " \"bitstream\": {\n" + + " \"constraints\": {\n" + + " \"uuid\": []\n" + + " },\n" + + " \"mode\": \"replace\",\n" + + " \"accessConditions\": [\n" + + " {\n" + + " \"name\": \"administrator\",\n" + + " \"startDate\": null,\n" + + " \"endDate\": null\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + buildJsonFile(jsonOne); + + String[] args = new String[] { + "bulk-access-control", + "-u", subCommunity.getID().toString(), + "-f", tempFilePath, + "-e", admin.getEmail() + }; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getInfoMessages(), hasSize(2)); + + assertThat(testDSpaceRunnableHandler.getInfoMessages(), 
containsInAnyOrder( + containsString("Replacing Bitstream {" + bitstreamOne.getID() + + "} policy to access conditions:{administrator}"), + containsString("Replacing Bitstream {" + bitstreamTwo.getID() + + "} policy to access conditions:{administrator}") + )); + + bitstreamOne = context.reloadEntity(bitstreamOne); + bitstreamTwo = context.reloadEntity(bitstreamTwo); + + assertThat(bitstreamOne.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamOne.getResourcePolicies(), hasItem( + matches(READ, adminGroup, "administrator", TYPE_CUSTOM) + )); + + assertThat(bitstreamTwo.getResourcePolicies(), hasSize(1)); + assertThat(bitstreamTwo.getResourcePolicies(), hasItem( + matches(READ, adminGroup, "administrator", TYPE_CUSTOM) + )); + } + + @Test + public void performBulkAccessWithHelpParamTest() throws Exception { + + String[] args = new String[] {"bulk-access-control", "-h"}; + + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + + assertThat(testDSpaceRunnableHandler.getErrorMessages(), empty()); + assertThat(testDSpaceRunnableHandler.getWarningMessages(), empty()); + } + + private List findItems(String query) throws SearchServiceException { + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableItem.TYPE); + discoverQuery.setQuery(query); + + return searchService.search(context, discoverQuery) + .getIndexableObjects() + .stream() + .map(indexableObject -> + ((IndexableItem) indexableObject).getIndexedObject()) + .collect(Collectors.toList()); + } + + private List findAllBitstreams(Item item) { + return item.getBundles(CONTENT_BUNDLE_NAME) + .stream() + .flatMap(bundle -> bundle.getBitstreams().stream()) + .collect(Collectors.toList()); + } + + private void buildJsonFile(String json) throws IOException { + File file = new File(tempDir + "/bulk-access.json"); + Path 
path = Paths.get(file.getAbsolutePath()); + Files.writeString(path, json, StandardCharsets.UTF_8); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java index f767ba1663ae..0b7fd8026803 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java @@ -99,8 +99,9 @@ public void metadataExportWithoutFileParameter() script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } @@ -206,8 +207,9 @@ public void metadataExportToCsvTest_NonValidIdentifier() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException(); @@ -235,8 +237,9 @@ public void metadataExportToCsvTest_NonValidDSOType() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } Exception exceptionDuringTestRun = testDSpaceRunnableHandler.getException(); diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java new 
file mode 100644 index 000000000000..3a972692efeb --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportSearchIT.java @@ -0,0 +1,253 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.bulkedit; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.Reader; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import com.google.common.io.Files; +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvException; +import org.apache.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchUtils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase { + + private String subject1 = "subject1"; + private String subject2 = "subject2"; + private int numberItemsSubject1 = 30; + private int numberItemsSubject2 = 2; + private Item[] itemsSubject1 = new Item[numberItemsSubject1]; + private Item[] itemsSubject2 = new Item[numberItemsSubject2]; + private String 
filename; + private Collection collection; + private Logger logger = Logger.getLogger(MetadataExportSearchIT.class); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private SearchService searchService; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + searchService = SearchUtils.getSearchService(); + + // dummy search so that the SearchService gets called in a test context first + DiscoverQuery query = new DiscoverQuery(); + query.setMaxResults(0); + searchService.search(context, query); + + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + collection = CollectionBuilder.createCollection(context, community).build(); + filename = configurationService.getProperty("dspace.dir") + + testProps.get("test.exportcsv").toString(); + + + for (int i = 0; i < numberItemsSubject1; i++) { + itemsSubject1[i] = ItemBuilder.createItem(context, collection) + .withTitle(String.format("%s item %d", subject1, i)) + .withSubject(subject1) + .withIssueDate("2020-09-" + i) + .build(); + } + + for (int i = 0; i < numberItemsSubject2; i++) { + itemsSubject2[i] = ItemBuilder.createItem(context, collection) + .withTitle(String.format("%s item %d", subject2, i)) + .withSubject(subject2) + .withIssueDate("2021-09-" + i) + .build(); + } + context.restoreAuthSystemState(); + } + + private void checkItemsPresentInFile(String filename, Item[] items) throws IOException, CsvException { + File file = new File(filename); + Reader reader = Files.newReader(file, Charset.defaultCharset()); + CSVReader csvReader = new CSVReader(reader); + + + List lines = csvReader.readAll(); + //length + 1 is because of 1 row extra for the headers + assertEquals(items.length + 1, lines.size()); + + List ids = new ArrayList<>(); + //ignoring the first row as this only contains headers; + logger.debug("checking content of lines"); + for (int i = 1; i < 
lines.size(); i++) { + logger.debug(String.join(", ", lines.get(i))); + ids.add(lines.get(i)[0]); + } + + for (Item item : items) { + assertTrue(ids.contains(item.getID().toString())); + } + } + + @Test + public void metadateExportSearchQueryTest() throws Exception { + int result = runDSpaceScript("metadata-export-search", "-q", "subject:" + subject1, "-n", filename); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject1); + + + result = runDSpaceScript("metadata-export-search", "-q", "subject: " + subject2, "-n", filename); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject2); + } + + @Test + public void exportMetadataSearchSpecificContainerTest() throws Exception { + context.turnOffAuthorisationSystem(); + Community community2 = CommunityBuilder.createCommunity(context).build(); + Collection collection2 = CollectionBuilder.createCollection(context, community2).build(); + + int numberItemsDifferentCollection = 15; + Item[] itemsDifferentCollection = new Item[numberItemsDifferentCollection]; + for (int i = 0; i < numberItemsDifferentCollection; i++) { + itemsDifferentCollection[i] = ItemBuilder.createItem(context, collection2) + .withTitle("item different collection " + i) + .withSubject(subject1) + .build(); + } + + //creating some items with a different subject to make sure the query still works + for (int i = 0; i < 5; i++) { + ItemBuilder.createItem(context, collection2) + .withTitle("item different collection, different subject " + i) + .withSubject(subject2) + .build(); + } + context.restoreAuthSystemState(); + + int result = runDSpaceScript( + "metadata-export-search", "-q", "subject: " + subject1, "-s", collection2.getID().toString(), "-n", filename + ); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsDifferentCollection); + } + + @Test + public void exportMetadataSearchFilter() throws Exception { + int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=" + 
subject1, "-n", filename); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject1); + } + + @Test + public void exportMetadataSearchFilterDate() throws Exception { + int result = runDSpaceScript( + "metadata-export-search", "-f", "dateIssued,equals=[2000 TO 2020]", "-n", filename + ); + + assertEquals(0, result); + checkItemsPresentInFile(filename, itemsSubject1); + } + + @Test + public void exportMetadataSearchMultipleFilters() throws Exception { + int result = runDSpaceScript( + "metadata-export-search", "-f", "subject,equals=" + subject1, "-f", + "title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename + ); + + assertEquals(0, result); + Item[] expectedResult = Arrays.copyOfRange(itemsSubject1, 0, 1); + checkItemsPresentInFile(filename, expectedResult); + } + + @Test + public void exportMetadataSearchEqualsFilterTest() + throws Exception { + context.turnOffAuthorisationSystem(); + Item wellBeingItem = ItemBuilder.createItem(context, collection) + .withTitle("test item well-being") + .withSubject("well-being") + .build(); + + ItemBuilder.createItem(context, collection) + .withTitle("test item financial well-being") + .withSubject("financial well-being") + .build(); + + context.restoreAuthSystemState(); + + int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=well-being", "-n", filename); + + assertEquals(0, result); + Item[] expectedResult = new Item[] {wellBeingItem}; + checkItemsPresentInFile(filename, expectedResult); + } + + @Test + public void exportMetadataSearchInvalidDiscoveryQueryTest() throws Exception { + int result = runDSpaceScript("metadata-export-search", "-q", "blabla", "-n", filename); + + assertEquals(0, result); + Item[] items = {}; + checkItemsPresentInFile(filename, items); + } + + @Test + public void exportMetadataSearchNoResultsTest() throws Exception { + int result = runDSpaceScript( + "metadata-export-search", "-f", "subject,equals=notExistingSubject", "-n", filename + ); + 
+ assertEquals(0, result); + Item[] items = {}; + checkItemsPresentInFile(filename, items); + } + + @Test + public void exportMetadataSearchNonExistinFacetsTest() throws Exception { + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] {"metadata-export-search", "-f", "nonExisting,equals=" + subject1, "-f", + "title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename}; + int result = ScriptLauncher.handleScript( + args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl + ); + + assertEquals(0, result); // exception should be handled, so the script should finish with 0 + + Exception exception = testDSpaceRunnableHandler.getException(); + assertNotNull(exception); + assertEquals("nonExisting is not a valid search filter", exception.getMessage()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java index 2b30ea8cd358..e50f7913ad70 100644 --- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java +++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportIT.java @@ -19,6 +19,7 @@ import org.apache.commons.cli.ParseException; import org.apache.commons.collections4.IteratorUtils; +import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.app.launcher.ScriptLauncher; @@ -47,12 +48,16 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase { - private ItemService itemService - = ContentServiceFactory.getInstance().getItemService(); - private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - private RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + private final ItemService itemService + = 
ContentServiceFactory.getInstance().getItemService(); + private final EPersonService ePersonService + = EPersonServiceFactory.getInstance().getEPersonService(); + private final RelationshipService relationshipService + = ContentServiceFactory.getInstance().getRelationshipService(); - Collection collection; + private Collection collection; + private Collection publicationCollection; + private Collection personCollection; @Before @Override @@ -61,6 +66,12 @@ public void setUp() throws Exception { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); this.collection = CollectionBuilder.createCollection(context, community).build(); + this.publicationCollection = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + this.personCollection = CollectionBuilder.createCollection(context, community) + .withEntityType("Person") + .build(); context.restoreAuthSystemState(); } @@ -82,6 +93,42 @@ public void metadataImportTest() throws Exception { context.restoreAuthSystemState(); } + @Test + public void metadataImportIntoCollectionWithEntityTypeWithTemplateEnabledTest() throws Exception { + String[] csv = {"id,collection,dc.title,dc.contributor.author", + "+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""}; + performImportScript(csv, true); + Item importedItem = findItemByName("Test Import 1"); + assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY) + .get(0).getValue(), "Donald, SmithImported")); + assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dspace", "entity", "type", Item.ANY) + .get(0).getValue(), "Publication")); + eperson = ePersonService.findByEmail(context, eperson.getEmail()); + assertEquals(importedItem.getSubmitter(), eperson); + + context.turnOffAuthorisationSystem(); + itemService.delete(context, itemService.find(context, 
importedItem.getID())); + context.restoreAuthSystemState(); + } + + @Test + public void metadataImportIntoCollectionWithEntityTypeWithTemplateDisabledTest() throws Exception { + String[] csv = {"id,collection,dc.title,dc.contributor.author", + "+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""}; + performImportScript(csv, false); + Item importedItem = findItemByName("Test Import 1"); + assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY) + .get(0).getValue(), "Donald, SmithImported")); + assertEquals(0, itemService.getMetadata(importedItem, "dspace", "entity", "type", Item.ANY) + .size()); + eperson = ePersonService.findByEmail(context, eperson.getEmail()); + assertEquals(importedItem.getSubmitter(), eperson); + + context.turnOffAuthorisationSystem(); + itemService.delete(context, itemService.find(context, importedItem.getID())); + context.restoreAuthSystemState(); + } + @Test(expected = ParseException.class) public void metadataImportWithoutEPersonParameterTest() throws IllegalAccessException, InstantiationException, ParseException { @@ -97,15 +144,16 @@ public void metadataImportWithoutEPersonParameterTest() script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } @Test public void relationshipMetadataImportTest() throws Exception { context.turnOffAuthorisationSystem(); - Item item = ItemBuilder.createItem(context, collection).withEntityType("Publication") + Item item = ItemBuilder.createItem(context, publicationCollection) .withTitle("Publication1").build(); EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); EntityType person = 
EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); @@ -114,12 +162,12 @@ public void relationshipMetadataImportTest() throws Exception { context.restoreAuthSystemState(); String[] csv = {"id,collection,dc.title,relation.isPublicationOfAuthor,dspace.entity.type", - "+," + collection.getHandle() + ",\"Test Import 1\"," + item.getID() + ",Person"}; + "+," + personCollection.getHandle() + ",\"Test Import 1\"," + item.getID() + ",Person"}; performImportScript(csv); Item importedItem = findItemByName("Test Import 1"); - assertEquals(relationshipService.findByItem(context, importedItem).size(), 1); + assertEquals(1, relationshipService.findByItem(context, importedItem).size()); context.turnOffAuthorisationSystem(); itemService.delete(context, itemService.find(context, importedItem.getID())); context.restoreAuthSystemState(); @@ -128,11 +176,11 @@ public void relationshipMetadataImportTest() throws Exception { @Test public void relationshipMetadataImporAlreadyExistingItemTest() throws Exception { context.turnOffAuthorisationSystem(); - Item personItem = ItemBuilder.createItem(context, collection).withEntityType("Person") + Item personItem = ItemBuilder.createItem(context, personCollection) .withTitle("Person1").build(); List relationshipList = relationshipService.findByItem(context, personItem); assertEquals(0, relationshipList.size()); - Item publicationItem = ItemBuilder.createItem(context, collection).withEntityType("Publication") + Item publicationItem = ItemBuilder.createItem(context, publicationCollection) .withTitle("Publication1").build(); EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); @@ -143,12 +191,12 @@ public void relationshipMetadataImporAlreadyExistingItemTest() throws Exception String[] csv = {"id,collection,relation.isPublicationOfAuthor", - personItem.getID() + "," + collection.getHandle() + "," + publicationItem.getID()}; + personItem.getID() + "," + publicationCollection.getHandle() 
+ "," + publicationItem.getID()}; performImportScript(csv); Item importedItem = findItemByName("Person1"); - assertEquals(relationshipService.findByItem(context, importedItem).size(), 1); + assertEquals(1, relationshipService.findByItem(context, importedItem).size()); } @@ -156,7 +204,7 @@ public void relationshipMetadataImporAlreadyExistingItemTest() throws Exception public void personMetadataImportTest() throws Exception { String[] csv = {"id,collection,dc.title,person.birthDate", - "+," + collection.getHandle() + ",\"Test Import 2\"," + "2000"}; + "+," + publicationCollection.getHandle() + ",\"Test Import 2\"," + "2000"}; performImportScript(csv); Item importedItem = findItemByName("Test Import 2"); assertTrue( @@ -172,7 +220,7 @@ public void personMetadataImportTest() throws Exception { public void metadataImportRemovingValueTest() throws Exception { context.turnOffAuthorisationSystem(); - Item item = ItemBuilder.createItem(context, collection).withAuthor("TestAuthorToRemove").withTitle("title") + Item item = ItemBuilder.createItem(context,personCollection).withAuthor("TestAuthorToRemove").withTitle("title") .build(); context.restoreAuthSystemState(); @@ -182,10 +230,10 @@ public void metadataImportRemovingValueTest() throws Exception { "TestAuthorToRemove")); String[] csv = {"id,collection,dc.title,dc.contributor.author[*]", - item.getID().toString() + "," + collection.getHandle() + "," + item.getName() + ","}; + item.getID().toString() + "," + personCollection.getHandle() + "," + item.getName() + ","}; performImportScript(csv); item = findItemByName("title"); - assertEquals(itemService.getMetadata(item, "dc", "contributor", "author", Item.ANY).size(), 0); + assertEquals(0, itemService.getMetadata(item, "dc", "contributor", "author", Item.ANY).size()); } private Item findItemByName(String name) throws SQLException { @@ -199,10 +247,16 @@ private Item findItemByName(String name) throws SQLException { return importedItem; } + public void 
performImportScript(String[] csv) throws Exception { + performImportScript(csv, false); + } + /** * Import mocked CSVs to test item creation behavior, deleting temporary file afterward. + * @param csv content for test file. + * @throws java.lang.Exception passed through. */ - public void performImportScript(String[] csv) throws Exception { + public void performImportScript(String[] csv, boolean useTemplate) throws Exception { File csvFile = File.createTempFile("dspace-test-import", "csv"); BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(csvFile), "UTF-8")); for (String csvLine : csv) { @@ -213,6 +267,9 @@ public void performImportScript(String[] csv) throws Exception { String fileLocation = csvFile.getAbsolutePath(); try { String[] args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s"}; + if (useTemplate) { + args = ArrayUtils.add(args, "-t"); + } TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); ScriptLauncher .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); diff --git a/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java index 34b075b6e7cf..aee4b4d267cc 100644 --- a/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java +++ b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java @@ -12,6 +12,7 @@ import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; +import java.io.IOException; import java.io.OutputStreamWriter; import java.sql.SQLException; import java.util.ArrayList; @@ -54,16 +55,20 @@ public class CSVMetadataImportReferenceIT extends AbstractIntegrationTestWithDatabase { //Common collection to utilize for test + private Collection col; private Collection col1; + private Collection col2; - private RelationshipService 
relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); - private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - + private final RelationshipService relationshipService + = ContentServiceFactory.getInstance().getRelationshipService(); + private final ItemService itemService + = ContentServiceFactory.getInstance().getItemService(); Community parentCommunity; /** - * Setup testing enviorment + * Setup testing environment. + * @throws java.sql.SQLException passed through. */ @Before public void setup() throws SQLException { @@ -72,15 +77,26 @@ public void setup() throws SQLException { .withName("Parent Community") .build(); - col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + + col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Person") + .withName("Collection 1") + .build(); + col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Publication") + .withName("Collection 2") + .build(); context.turnOffAuthorisationSystem(); EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); EntityType person = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); EntityType project = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); - EntityType orgUnit = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build(); + EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build(); RelationshipTypeBuilder .createRelationshipTypeBuilder(context, publication, person, "isAuthorOfPublication", @@ -130,9 +146,24 @@ private void assertRelationship(Item leftItem, Item rightItem, int expectedCount public void testSingleMdRef() throws Exception { String[] csv = 
{"id,dspace.entity.type,relation.isAuthorOfPublication,collection,dc.identifier.other", "+,Person,," + col1.getHandle() + ",0", - "+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"}; + "+,Publication,dc.identifier.other:0," + col2.getHandle() + ",1"}; + Item[] items = runImport(csv); + assertRelationship(items[1], items[0], 1, "left", 0); + + // remove created items + cleanupImportItems(items); + } + + @Test + public void testSingleMdRefIntoCollectionWithoutEntityTypeTest() throws Exception { + String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,dc.identifier.other", + "+,Person,," + col.getHandle() + ",0", + "+,Publication,dc.identifier.other:0," + col.getHandle() + ",1"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); + + // remove created items + cleanupImportItems(items); } /** @@ -151,6 +182,20 @@ private Item[] runImport(String[] csvLines) throws Exception { return items; } + /** + * Delete the Items in the given array. 
This method is used for cleanup after using "runImport" + * @param items items array + * @throws SQLException + * @throws IOException + */ + private void cleanupImportItems(Item[] items) throws SQLException, IOException { + context.turnOffAuthorisationSystem(); + for (Item item: items) { + ItemBuilder.deleteItem(item.getID()); + } + context.restoreAuthSystemState(); + } + /** * Test existence of newly created item with proper relationships defined in the item's metadata via * a rowName reference @@ -160,9 +205,11 @@ public void testSingleRowNameRef() throws Exception { String[] csv = {"id,dc.title,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName," + "dc.identifier.other", "+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0", - "+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"}; + "+,Test Item 2,Publication,rowName:idVal," + col2.getHandle() + ",anything,1"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); + // remove created items + cleanupImportItems(items); } /** @@ -174,10 +221,12 @@ public void testMultiMdRef() throws Exception { String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,dc.identifier.other", "+,Person,," + col1.getHandle() + ",0", "+,Person,," + col1.getHandle() + ",1", - "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"}; + "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col2.getHandle() + ",2"}; Item[] items = runImport(csv); assertRelationship(items[2], items[0], 1, "left", 0); assertRelationship(items[2], items[1], 1, "left", 1); + // remove created items + cleanupImportItems(items); } /** @@ -189,10 +238,12 @@ public void testMultiRowNameRef() throws Exception { String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", "+,Person,," + col1.getHandle() + ",0,val1", "+,Person,," + col1.getHandle() + ",1,val2", - 
"+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"}; + "+,Publication,rowName:val1||rowName:val2," + col2.getHandle() + ",2,val3"}; Item[] items = runImport(csv); assertRelationship(items[2], items[0], 1, "left", 0); assertRelationship(items[2], items[1], 1, "left", 1); + // remove created items + cleanupImportItems(items); } /** @@ -208,13 +259,14 @@ public void testSingleUUIDReference() throws Exception { .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .build(); context.restoreAuthSystemState(); String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other", - "+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"}; + "+,Publication," + person.getID().toString() + "," + col2.getHandle() + ",anything,0"}; Item[] items = runImport(csv); assertRelationship(items[0], person, 1, "left", 0); + // remove created items + cleanupImportItems(items); } /** @@ -230,7 +282,6 @@ public void testMultiUUIDReference() throws Exception { .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .build(); Item person2 = ItemBuilder.createItem(context, col1) .withTitle("Author2") @@ -238,14 +289,15 @@ public void testMultiUUIDReference() throws Exception { .withAuthor("Smith, John") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("John") - .withEntityType("Person") .build(); String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other", "+,Publication," + person.getID().toString() + "||" + person2.getID().toString() + "," + - col1.getHandle() + ",anything,0"}; + col2.getHandle() + ",anything,0"}; Item[] items = runImport(csv); assertRelationship(items[0], person, 1, "left", 0); assertRelationship(items[0], person2, 1, "left", 1); + // remove created 
items + cleanupImportItems(items); } /** @@ -261,16 +313,17 @@ public void testMultiRefArchivedCsv() throws Exception { .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .build(); String[] csv = {"id,dc.title,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName," + "dc.identifier.other", "+,Person2,Person,," + col1.getHandle() + ",idVal,0", - "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"}; + "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col2.getHandle() + ",anything,1"}; context.restoreAuthSystemState(); Item[] items = runImport(csv); assertRelationship(items[1], person, 1, "left", 0); assertRelationship(items[1], items[0], 1, "left", 1); + // remove created items + cleanupImportItems(items); } /** @@ -287,7 +340,6 @@ public void testMultiMixedRefArchivedCsv() throws Exception { .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .build(); Item person2 = ItemBuilder.createItem(context, col1) .withTitle("Person2") @@ -295,7 +347,6 @@ public void testMultiMixedRefArchivedCsv() throws Exception { .withAuthor("Smith, John") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("John") - .withEntityType("Person") .build(); context.restoreAuthSystemState(); @@ -303,11 +354,13 @@ public void testMultiMixedRefArchivedCsv() throws Exception { "dc.identifier.other", "+,Person3,Person,," + col1.getHandle() + ",idVal,0", "+,Pub1,Publication," + person.getID() + "||dc.title:Person2||rowName:idVal," + - col1.getHandle() + ",anything,1"}; + col2.getHandle() + ",anything,1"}; Item[] items = runImport(csv); assertRelationship(items[1], person, 1, "left", 0); assertRelationship(items[1], person2, 1, "left", 1); assertRelationship(items[1], items[0], 1, "left", 2); + // remove created items + cleanupImportItems(items); } /** @@ 
-319,9 +372,11 @@ public void testRefWithSpecialChar() throws Exception { String[] csv = {"id,dc.title,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName," + "dc.identifier.other", "+,Person:,Person,," + col1.getHandle() + ",idVal,0", - "+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"}; + "+,Pub1,Publication,dc.title:Person:," + col2.getHandle() + ",anything,1"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); + // remove created items + cleanupImportItems(items); } /** @@ -350,26 +405,25 @@ public void testNonUniqueRowName() throws Exception { /** * Test failure when referring to item by non unique metadata in the database. + * @throws java.lang.Exception passed through. */ @Test(expected = MetadataImportException.class) public void testNonUniqueMDRefInDb() throws Exception { context.turnOffAuthorisationSystem(); - Item person = ItemBuilder.createItem(context, col1) + ItemBuilder.createItem(context, col1) .withTitle("Person") .withIssueDate("2017-10-17") .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .withIdentifierOther("1") .build(); - Item person2 = ItemBuilder.createItem(context, col1) + ItemBuilder.createItem(context, col1) .withTitle("Person2") .withIssueDate("2017-10-17") .withAuthor("Smith, John") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("John") - .withEntityType("Person") .withIdentifierOther("1") .build(); @@ -385,13 +439,12 @@ public void testNonUniqueMDRefInDb() throws Exception { @Test(expected = MetadataImportException.class) public void testNonUniqueMDRefInBoth() throws Exception { context.turnOffAuthorisationSystem(); - Item person = ItemBuilder.createItem(context, col1) + ItemBuilder.createItem(context, col1) .withTitle("Person") .withIssueDate("2017-10-17") .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") 
.withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .withIdentifierOther("1") .build(); context.restoreAuthSystemState(); @@ -402,7 +455,7 @@ public void testNonUniqueMDRefInBoth() throws Exception { } /** - * Test failure when refering to item by metadata that does not exist in the relation column + * Test failure when referring to item by metadata that does not exist in the relation column */ @Test(expected = Exception.class) public void testNonExistMdRef() throws Exception { @@ -413,7 +466,7 @@ public void testNonExistMdRef() throws Exception { } /** - * Test failure when refering to an item in the CSV that hasn't been created yet due to it's order in the CSV + * Test failure when referring to an item in the CSV that hasn't been created yet due to it's order in the CSV */ @Test(expected = Exception.class) public void testCSVImportWrongOrder() throws Exception { @@ -424,7 +477,7 @@ public void testCSVImportWrongOrder() throws Exception { } /** - * Test failure when refering to an item in the CSV that hasn't been created yet due to it's order in the CSV + * Test failure when referring to an item in the CSV that hasn't been created yet due to it's order in the CSV */ @Test(expected = Exception.class) public void testCSVImportWrongOrderRowName() throws Exception { @@ -446,16 +499,26 @@ public void testCSVImportInvalidRelationship() throws Exception { } /** - * Test relationship validation with invalid relationship definition and with an archived origin referer + * Test relationship validation with invalid relationship definition and with an archived origin referrer. 
*/ @Test(expected = MetadataImportInvalidHeadingException.class) public void testInvalidRelationshipArchivedOrigin() throws Exception { context.turnOffAuthorisationSystem(); - Item testItem = ItemBuilder.createItem(context, col1) + + Community rootCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection orgUnitCollection = CollectionBuilder.createCollection(context, rootCommunity) + .withEntityType("OrgUnit") + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, orgUnitCollection) .withTitle("OrgUnit") .withIssueDate("2017-10-17") - .withEntityType("OrgUnit") .build(); + context.restoreAuthSystemState(); String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName", "+,Person,," + col1.getHandle() + ",1" + @@ -469,11 +532,21 @@ public void testInvalidRelationshipArchivedOrigin() throws Exception { @Test(expected = MetadataImportInvalidHeadingException.class) public void testInvalidRelationshipArchivedTarget() throws Exception { context.turnOffAuthorisationSystem(); - Item testItem = ItemBuilder.createItem(context, col1) + + Community rootCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection orgUnitCollection = CollectionBuilder.createCollection(context, rootCommunity) + .withEntityType("OrgUnit") + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, orgUnitCollection) .withTitle("OrgUnit") .withIssueDate("2017-10-17") - .withEntityType("OrgUnit") .build(); + context.restoreAuthSystemState(); String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,rowName", testItem.getID().toString() + ",Person,," + col1.getHandle() + ",1" + @@ -488,29 +561,40 @@ public void testInvalidRelationshipArchivedTarget() throws Exception { public void testValidRelationshipNoDefinedTypesInCSV() throws Exception { context.turnOffAuthorisationSystem(); 
+ Community rootCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection publicationCollection = CollectionBuilder.createCollection(context, rootCommunity) + .withEntityType("Publication") + .withName("Collection 1") + .build(); + + Collection projectCollection = CollectionBuilder.createCollection(context, rootCommunity) + .withEntityType("Project") + .withName("Collection 1") + .build(); + Item testItem = ItemBuilder.createItem(context, col1) .withTitle("Person") .withIssueDate("2017-10-17") .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .withIdentifierOther("testItemOne") .build(); - Item testItem2 = ItemBuilder.createItem(context, col1) + Item testItem2 = ItemBuilder.createItem(context, publicationCollection) .withTitle("Publication") .withIssueDate("2017-10-17") - .withEntityType("Publication") .withIdentifierOther("testItemTwo") .build(); - Item testItem3 = ItemBuilder.createItem(context, col1) + Item testItem3 = ItemBuilder.createItem(context, projectCollection) .withTitle("Project") .withIssueDate("2017-10-17") - .withEntityType("Project") .withIdentifierOther("testItemThree") .build(); @@ -532,11 +616,13 @@ public void testValidRelationshipNoDefinedTypesInCSV() throws Exception { public void testDuplicateRowNameReferences() throws Exception { String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", "+,Person,," + col1.getHandle() + ",0,value", - "+,Publication,rowName:value," + col1.getHandle() + ",1,1", - "+,Publication,rowName:value," + col1.getHandle() + ",2,2"}; + "+,Publication,rowName:value," + col2.getHandle() + ",1,1", + "+,Publication,rowName:value," + col2.getHandle() + ",2,2"}; Item[] items = runImport(csv); assertRelationship(items[1], items[0], 1, "left", 0); assertRelationship(items[2], items[0], 1, "left", 0); + // remove created items + 
cleanupImportItems(items); } @Test @@ -548,13 +634,12 @@ public void testRelationToVirtualDataInReferences() throws Exception { .withAuthor("Smith, Donald") .withPersonIdentifierLastName("Smith") .withPersonIdentifierFirstName("Donald") - .withEntityType("Person") .withIdentifierOther("testItemOne") .build(); String[] csv = {"id,dspace.entity.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName", - "+,Publication," + testItem.getID() + "::virtual::4::600," + col1.getHandle() + ",0,1"}; + "+,Publication," + testItem.getID() + "::virtual::4::600," + col2.getHandle() + ",0,1"}; Item[] items = runImport(csv); assertRelationship(items[0], testItem, 1, "left", 0); } @@ -566,11 +651,20 @@ public void testRelationToVirtualDataInReferences() throws Exception { public void testInvalidTypeNameDefined() throws Exception { context.turnOffAuthorisationSystem(); - Item testItem = ItemBuilder.createItem(context, col1) + Community rootCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection publicationCollection = CollectionBuilder.createCollection(context, rootCommunity) + .withEntityType("Publication") + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, publicationCollection) .withTitle("Publication") .withIssueDate("2017-10-17") - .withEntityType("Publication") .build(); + context.restoreAuthSystemState(); String[] csv = {"id,collection,dspace.entity.type,dc.title," + "relation.isProjectOfPublication,relation.isPublicationOfProject", @@ -608,8 +702,10 @@ public int performImportScript(String[] csv, boolean validateOnly) throws Except script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue + .equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } if 
(testDSpaceRunnableHandler.getException() != null) { throw testDSpaceRunnableHandler.getException(); @@ -641,5 +737,4 @@ private UUID getUUIDByIdentifierOther(String value) throws Exception { return uuidList.get(0); } - } diff --git a/dspace-api/src/test/java/org/dspace/app/itemexport/ItemExportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemexport/ItemExportCLIIT.java new file mode 100644 index 000000000000..6db37bdbcd05 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/itemexport/ItemExportCLIIT.java @@ -0,0 +1,363 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemexport; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.stream.Collectors; + +import org.apache.commons.codec.CharEncoding; +import org.apache.commons.io.IOUtils; +import org.apache.commons.io.file.PathUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.ItemService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for 
the SAF Export feature via CLI {@link ItemExportCLI}. + * https://wiki.lyrasis.org/display/DSDOC7x/Importing+and+Exporting+Items+via+Simple+Archive+Format + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemExportCLIIT extends AbstractIntegrationTestWithDatabase { + + private static final String zipFileName = "saf-export.zip"; + private static final String title = "A Tale of Two Cities"; + private static final String dateIssued = "1990"; + private static final String titleAlternative = "J'aime les Printemps"; + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private Collection collection; + private Path tempDir; + private Path workDir; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + context.restoreAuthSystemState(); + + tempDir = Files.createTempDirectory("safExportTest"); + File file = new File(configurationService.getProperty("org.dspace.app.itemexport.work.dir")); + if (!file.exists()) { + Files.createDirectory(Path.of(file.getAbsolutePath())); + } + workDir = Path.of(file.getAbsolutePath()); + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + for (Path path : Files.list(workDir).collect(Collectors.toList())) { + PathUtils.delete(path); + } + super.destroy(); + } + + @Test + public void exportCollection() throws Exception { + // create items + 
context.turnOffAuthorisationSystem(); + Item item1 = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + Item item2 = ItemBuilder.createItem(context, collection) + .withTitle(title + " 2") + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "COLLECTION", + "-i", collection.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportZipCollection() throws Exception { + // create items + context.turnOffAuthorisationSystem(); + Item item1 = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + Item item2 = ItemBuilder.createItem(context, collection) + .withTitle(title + " 2") + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "COLLECTION", + "-i", collection.getHandle(), "-d", tempDir.toString(), "-z", zipFileName, "-n", "1" }; + perfomExportScript(args); + + checkDir(); + checkZip(zipFileName); + } + + @Test + public void exportItemWithMetadataOnly() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + 
perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportItemWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + // create bitstream + String bitstreamContent = "TEST TEST TEST"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportItemWithAnotherMetadataSchema() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .withMetadata("dcterms", "title", "", title) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1" }; + perfomExportScript(args); + + checkDir(); + } + + @Test + public void exportZipItemWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + // create bitstream + String bitstreamContent = "TEST TEST TEST"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + Bitstream 
bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-z", zipFileName, "-n", "1" }; + perfomExportScript(args); + + checkDir(); + checkZip(zipFileName); + } + + @Test + public void migrateCollection() throws Exception { + // create items + context.turnOffAuthorisationSystem(); + Item item1 = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + Item item2 = ItemBuilder.createItem(context, collection) + .withTitle(title + " 2") + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "COLLECTION", + "-i", collection.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkCollectionMigration(); + checkItemMigration(item1); + checkItemMigration(item2); + } + + @Test + public void migrateItemWithMetadataOnly() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkItemMigration(item); + } + + @Test + public void migrateItemWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, 
collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .build(); + // create bitstream + String bitstreamContent = "TEST TEST TEST"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, is) + .withName("Bitstream") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkItemMigration(item); + } + + @Test + public void migrateItemWithAnotherMetadataSchema() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(title) + .withMetadata("dc", "date", "issued", dateIssued) + .withMetadata("dc", "title", "alternative", titleAlternative) + .withMetadata("dcterms", "title", "", title) + .build(); + context.restoreAuthSystemState(); + + String[] args = new String[] { "export", "-t", "ITEM", + "-i", item.getHandle(), "-d", tempDir.toString(), "-n", "1", "-m" }; + perfomExportScript(args); + + checkDir(); + checkItemMigration(item); + } + + /** + * Check created export directory + * @throws Exception + */ + private void checkDir() throws Exception { + assertTrue(Files.list(tempDir).findAny().isPresent()); + } + + /** + * Check created export zip + * @param zipFileName + * @throws Exception + */ + private void checkZip(String zipFileName) throws Exception { + assertEquals(1, + Files.list(tempDir) + .filter(b -> StringUtils.equals(b.getFileName().toString(), zipFileName)) + .count()); + } + + /** + * Check migration of collection + * @throws Exception + */ + private void checkCollectionMigration() throws Exception { + assertNotNull(collectionService.find(context, collection.getID())); + 
} + + /** + * Check migration of item + * @param item + * @throws Exception + */ + private void checkItemMigration(Item item) throws Exception { + assertNotNull(itemService.find(context, item.getID())); + } + + private void perfomExportScript(String[] args) + throws Exception { + runDSpaceScript(args); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java new file mode 100644 index 000000000000..08ae3af4ae06 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -0,0 +1,604 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.itemimport; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Iterator; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.io.file.PathUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.Relationship; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import 
org.flywaydb.core.internal.util.ExceptionUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for the SAF Import feature via CLI {@link ItemImportCLI}. + * https://wiki.lyrasis.org/display/DSDOC7x/Importing+and+Exporting+Items+via+Simple+Archive+Format + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { + + private static final String ZIP_NAME = "saf.zip"; + private static final String PDF_NAME = "test.pdf"; + private static final String publicationTitle = "A Tale of Two Cities"; + private static final String personTitle = "Person Test"; + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + private Collection collection; + private Path tempDir; + private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + + EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType person = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publication, person, "isAuthorOfPublication", + "isPublicationOfAuthor", 0, null, 0, null) + .withCopyToLeft(false).withCopyToRight(true).build(); + 
context.restoreAuthSystemState(); + + tempDir = Files.createTempDirectory("safImportTest"); + File file = new File(configurationService.getProperty("org.dspace.app.batchitemimport.work.dir")); + if (!file.exists()) { + Files.createDirectory(Path.of(file.getAbsolutePath())); + } + workDir = Path.of(file.getAbsolutePath()); + } + + @After + @Override + public void destroy() throws Exception { + PathUtils.deleteDirectory(tempDir); + for (Path path : Files.list(workDir).collect(Collectors.toList())) { + PathUtils.delete(path); + } + super.destroy(); + } + + @Test + public void importItemBySafWithMetadataOnly() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void importItemBySafWithBitstreams() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", safDir.toString(), "-m", 
tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void importItemBySafWithAnotherMetadataSchema() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void importItemsBySafWithRelationships() throws Exception { + context.turnOffAuthorisationSystem(); + // create collection that contains person + Collection collectionPerson = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection Person") + .withEntityType("Person") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path publicationDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.writeString(Path.of(publicationDir.toString() + "/collections"), + collection.getID().toString()); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(publicationDir.toString() + "/dublin_core.xml")); + Files.copy(getClass().getResourceAsStream("relationships"), + Path.of(publicationDir.toString() + "/relationships")); + Path personDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.writeString(Path.of(personDir.toString() + "/collections"), + 
collectionPerson.getID().toString()); + Files.copy(getClass().getResourceAsStream("dublin_core-person.xml"), + Path.of(personDir.toString() + "/dublin_core.xml")); + + String[] args = new String[] { "import", "-a", "-p", "-e", admin.getEmail(), + "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkRelationship(); + } + + @Test + public void importItemsBySafWithRelationshipsByRelationSchema() throws Exception { + context.turnOffAuthorisationSystem(); + // create collection that contains person + Collection collectionPerson = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection Person") + .withEntityType("Person") + .build(); + Item person = ItemBuilder.createItem(context, collectionPerson) + .withTitle(personTitle) + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + Files.writeString(Path.of(itemDir.toString() + "/metadata_relation.xml"), + "\n" + + " " + person.getID() + "\n" + + ""); + + String[] args = new String[] { "import", "-a", "-p", "-e", admin.getEmail(), "-c", + collection.getID().toString(), "-s", safDir.toString(), "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkRelationship(); + } + + @Test + public void importItemByZipSafWithBitstreams() throws Exception { + // use simple SAF in zip format + Files.copy(getClass().getResourceAsStream("saf-bitstreams.zip"), + Path.of(tempDir.toString() + "/" + ZIP_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", ZIP_NAME, "-m", tempDir.toString() + "/mapfile.out" }; + 
perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); + } + + @Test + public void importItemByZipSafWithRelationships() throws Exception { + context.turnOffAuthorisationSystem(); + // create collection that contains person + Collection collectionPerson = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection Person") + .withEntityType("Person") + .build(); + // create person + Item person = ItemBuilder.createItem(context, collectionPerson) + .withTitle(personTitle) + .build(); + context.restoreAuthSystemState(); + // use simple SAF in zip format + Files.copy(getClass().getResourceAsStream("saf-relationships.zip"), + Path.of(tempDir.toString() + "/" + ZIP_NAME)); + + String[] args = new String[] { "import", "-a", "-p", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", tempDir.toString(), "-z", ZIP_NAME, + "-m", tempDir.toString() + "/mapfile.out" }; + perfomImportScript(args); + + checkMetadata(); + checkRelationship(); + } + + @Test + public void importItemByZipSafInvalidMimetype() throws Exception { + // use sample PDF file + Files.copy(getClass().getResourceAsStream("test.pdf"), + Path.of(tempDir.toString() + "/" + PDF_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + + "/mapfile.out" }; + try { + perfomImportScript(args); + } catch (Exception e) { + // should throw an exception due to invalid mimetype + assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass()); + } + } + + @Test + public void resumeImportItemBySafWithMetadataOnly() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); 
+ Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void resumeImportItemBySafWithBitstreams() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void resumeImportItemBySafWithAnotherMetadataSchema() throws Exception { + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + 
Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void resumeImportItemSkippingTheFirstOneBySafWithMetadataOnly() + throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void resumeImportItemSkippingTheFirstOneBySafWithBitstreams() + throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + 
Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void resumeImportItemSkippingTheFirstOneBySafWithAnotherMetadataSchema() + throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_001")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-a", "-R", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + 
checkMetadataWithAnotherSchema(); + } + + @Test + public void replaceItemBySafWithMetadataOnly() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-r", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + } + + @Test + public void replaceItemBySafWithBitstreams() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add bitstream + Path contentsFile = Files.createFile(Path.of(itemDir.toString() + "/contents")); + Files.writeString(contentsFile, + "file1.txt"); + Path bitstreamFile = Files.createFile(Path.of(itemDir.toString() + "/file1.txt")); + Files.writeString(bitstreamFile, + "TEST TEST TEST"); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + 
item.getHandle()); + + String[] args = new String[] { "import", "-r", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkBitstream(); + } + + @Test + public void replaceItemBySafWithAnotherMetadataSchema() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Another Title") + .build(); + context.restoreAuthSystemState(); + + // create simple SAF + Path safDir = Files.createDirectory(Path.of(tempDir.toString() + "/test")); + Path itemDir = Files.createDirectory(Path.of(safDir.toString() + "/item_000")); + Files.copy(getClass().getResourceAsStream("dublin_core.xml"), + Path.of(itemDir.toString() + "/dublin_core.xml")); + // add metadata with another schema + Files.copy(getClass().getResourceAsStream("metadata_dcterms.xml"), + Path.of(itemDir.toString() + "/metadata_dcterms.xml")); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-r", "-e", admin.getEmail(), + "-c", collection.getID().toString(), "-s", safDir.toString(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + checkMetadata(); + checkMetadataWithAnotherSchema(); + } + + @Test + public void deleteItemByMapFile() throws Exception { + // create item + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle(publicationTitle) + .build(); + context.restoreAuthSystemState(); + // add mapfile + Path mapFile = Files.createFile(Path.of(tempDir.toString() + "/mapfile.out")); + Files.writeString(mapFile, "item_000 " + item.getHandle()); + + String[] args = new String[] { "import", "-d", "-e", admin.getEmail(), + "-m", mapFile.toString() }; + perfomImportScript(args); + + 
checkItemDeletion(); + } + + /** + * Check metadata on imported item + * @throws Exception + */ + private void checkMetadata() throws Exception { + Item item = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next(); + assertEquals(item.getName(), publicationTitle); + assertEquals(itemService.getMetadata(item, "dc.date.issued"), "1990"); + assertEquals(itemService.getMetadata(item, "dc.title.alternative"), "J'aime les Printemps"); + } + + /** + * Check metadata on imported item + * @throws Exception + */ + private void checkMetadataWithAnotherSchema() throws Exception { + Item item = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next(); + assertEquals(item.getName(), publicationTitle); + assertEquals(itemService.getMetadata(item, "dcterms.title"), publicationTitle); + } + + /** + * Check bitstreams on imported item + * @throws Exception + */ + private void checkBitstream() throws Exception { + Bitstream bitstream = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next() + .getBundles("ORIGINAL").get(0).getBitstreams().get(0); + assertEquals(bitstream.getName(), "file1.txt"); + } + + /** + * Check deletion of item by mapfile + * @throws Exception + */ + private void checkItemDeletion() throws Exception { + Iterator itemIterator = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle); + assertEquals(itemIterator.hasNext(), false); + } + + /** + * Check relationships between imported items + * @throws Exception + */ + private void checkRelationship() throws Exception { + Item item = itemService.findByMetadataField(context, "dc", "title", null, publicationTitle).next(); + Item author = itemService.findByMetadataField(context, "dc", "title", null, personTitle).next(); + List relationships = relationshipService.findByItem(context, item); + assertEquals(1, relationships.size()); + assertEquals(author.getID(), 
relationships.get(0).getRightItem().getID()); + assertEquals(item.getID(), relationships.get(0).getLeftItem().getID()); + } + + private void perfomImportScript(String[] args) + throws Exception { + runDSpaceScript(args); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/LambdaMatcher.java b/dspace-api/src/test/java/org/dspace/app/matcher/LambdaMatcher.java new file mode 100644 index 000000000000..f5c00c340d12 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/matcher/LambdaMatcher.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.matcher; + +import java.util.function.Predicate; + +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; + +/** + * Matcher based on an {@link Predicate}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * @param the type of the instance to match + */ +public class LambdaMatcher extends BaseMatcher { + + private final Predicate matcher; + private final String description; + + public static LambdaMatcher matches(Predicate matcher) { + return new LambdaMatcher(matcher, "Matches the given predicate"); + } + + public static LambdaMatcher matches(Predicate matcher, String description) { + return new LambdaMatcher(matcher, description); + } + + public static Matcher> has(Predicate matcher) { + return Matchers.hasItem(matches(matcher)); + } + + private LambdaMatcher(Predicate matcher, String description) { + this.matcher = matcher; + this.description = description; + } + + @Override + @SuppressWarnings("unchecked") + public boolean matches(Object argument) { + return matcher.test((T) argument); + } + + @Override + public void describeTo(Description description) { + description.appendText(this.description); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/OrcidQueueMatcher.java b/dspace-api/src/test/java/org/dspace/app/matcher/OrcidQueueMatcher.java new file mode 100644 index 000000000000..9f83301515ca --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/matcher/OrcidQueueMatcher.java @@ -0,0 +1,136 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.matcher; + +import static org.hamcrest.Matchers.is; + +import org.dspace.content.Item; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link org.hamcrest.Matcher} to match a OrcidQueue by all + * its attributes. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueMatcher extends TypeSafeMatcher { + + private final Matcher profileItemMatcher; + + private final Matcher entityMatcher; + + private final Matcher recordTypeMatcher; + + private final Matcher putCodeMatcher; + + private final Matcher descriptionMatcher; + + private final Matcher metadataMatcher; + + private final Matcher operationMatcher; + + private final Matcher attemptsMatcher; + + private OrcidQueueMatcher(Matcher profileItemMatcher, Matcher entityMatcher, + Matcher recordTypeMatcher, Matcher putCodeMatcher, Matcher metadataMatcher, + Matcher descriptionMatcher, Matcher operationMatcher, + Matcher attemptsMatcher) { + this.profileItemMatcher = profileItemMatcher; + this.entityMatcher = entityMatcher; + this.recordTypeMatcher = recordTypeMatcher; + this.putCodeMatcher = putCodeMatcher; + this.metadataMatcher = metadataMatcher; + this.descriptionMatcher = descriptionMatcher; + this.operationMatcher = operationMatcher; + this.attemptsMatcher = attemptsMatcher; + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), anything(), + anything(), anything(), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + OrcidOperation operation, int attempts) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), anything(), + anything(), anything(), is(operation), is(attempts)); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + String putCode, OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), is(putCode), + anything(), anything(), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, 
String recordType, + String putCode, String metadata, String description, OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), + is(putCode), is(metadata), is(description), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item item, String recordType, + String putCode, String metadata, String description, OrcidOperation operation) { + return new OrcidQueueMatcher(is(item), is(item), is(recordType), + is(putCode), is(metadata), is(description), is(operation), anything()); + } + + public static OrcidQueueMatcher matches(Item profileItem, Item entity, String recordType, + String putCode, Matcher metadata, String description, OrcidOperation operation) { + return new OrcidQueueMatcher(is(profileItem), is(entity), is(recordType), + is(putCode), metadata, is(description), is(operation), anything()); + } + + @Override + public void describeTo(Description description) { + description.appendText("an orcid queue record that with the following attributes:") + .appendText(" item profileItem ").appendDescriptionOf(profileItemMatcher) + .appendText(", item entity ").appendDescriptionOf(entityMatcher) + .appendText(", record type ").appendDescriptionOf(recordTypeMatcher) + .appendText(", metadata ").appendDescriptionOf(metadataMatcher) + .appendText(", description ").appendDescriptionOf(descriptionMatcher) + .appendText(", operation ").appendDescriptionOf(operationMatcher) + .appendText(", attempts ").appendDescriptionOf(attemptsMatcher) + .appendText(" and put code ").appendDescriptionOf(putCodeMatcher); + } + + @Override + protected boolean matchesSafely(OrcidQueue item) { + return profileItemMatcher.matches(item.getProfileItem()) + && entityMatcher.matches(item.getEntity()) + && recordTypeMatcher.matches(item.getRecordType()) + && metadataMatcher.matches(item.getMetadata()) + && putCodeMatcher.matches(item.getPutCode()) + && descriptionMatcher.matches(item.getDescription()) + && 
operationMatcher.matches(item.getOperation()) + && attemptsMatcher.matches(item.getAttempts()); + } + + private static Matcher anything() { + return new BaseMatcher() { + + @Override + public boolean matches(Object item) { + return true; + } + + @Override + public void describeTo(Description description) { + + } + }; + + } + +} diff --git a/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java b/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java new file mode 100644 index 000000000000..26ea7dcb5a35 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/matcher/ResourcePolicyMatcher.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.matcher; + +import static org.dspace.util.MultiFormatDateParser.parse; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +import java.util.Date; + +import org.dspace.authorize.ResourcePolicy; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link Matcher} to match a ResourcePolicy. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResourcePolicyMatcher extends TypeSafeMatcher { + + private final Matcher actionId; + + private final Matcher ePerson; + + private final Matcher group; + + private final Matcher rptype; + + private final Matcher rpName; + + private final Matcher description; + + private final Matcher startDate; + + private final Matcher endDate; + + public ResourcePolicyMatcher(Matcher actionId, Matcher ePerson, Matcher group, + Matcher rpName, Matcher rptype, Matcher startDate, + Matcher endDate, Matcher description) { + this.actionId = actionId; + this.ePerson = ePerson; + this.group = group; + this.rptype = rptype; + this.rpName = rpName; + this.description = description; + this.startDate = startDate; + this.endDate = endDate; + } + + @Override + public void describeTo(Description description) { + description.appendText("Resource policy with action id ").appendDescriptionOf(actionId) + .appendText(" and EPerson ").appendDescriptionOf(ePerson) + .appendText(" and Group ").appendDescriptionOf(group) + .appendText(" and rpType ").appendDescriptionOf(rptype) + .appendText(" and rpName ").appendDescriptionOf(rpName) + .appendText(" and description ").appendDescriptionOf(this.description) + .appendText(" and start date ").appendDescriptionOf(startDate) + .appendText(" and end date ").appendDescriptionOf(endDate); + } + + public static ResourcePolicyMatcher matches(int actionId, EPerson ePerson, String rptype) { + return new ResourcePolicyMatcher(is(actionId), is(ePerson), nullValue(Group.class), + any(String.class), is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, EPerson ePerson, String rpName, String rptype) { + return new ResourcePolicyMatcher(is(actionId), is(ePerson), nullValue(Group.class), + is(rpName), is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static 
ResourcePolicyMatcher matches(int actionId, Group group, String rptype) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), + any(String.class), is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rptype) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), is(rpName), + is(rptype), any(Date.class), any(Date.class), any(String.class)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rptype, + String description) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), is(rpName), + is(rptype), any(Date.class), any(Date.class), is(description)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rpType, Date startDate, + Date endDate, String description) { + return new ResourcePolicyMatcher(is(actionId), nullValue(EPerson.class), is(group), is(rpName), + is(rpType), is(startDate), is(endDate), is(description)); + } + + public static ResourcePolicyMatcher matches(int actionId, Group group, String rpName, String rpType, + String startDate, String endDate, String description) { + return matches(actionId, group, rpName, rpType, startDate != null ? parse(startDate) : null, + endDate != null ? 
parse(endDate) : null, description); + } + + @Override + protected boolean matchesSafely(ResourcePolicy resourcePolicy) { + return actionId.matches(resourcePolicy.getAction()) + && ePerson.matches(resourcePolicy.getEPerson()) + && group.matches(resourcePolicy.getGroup()) + && rptype.matches(resourcePolicy.getRpType()) + && rpName.matches(resourcePolicy.getRpName()) + && description.matches(resourcePolicy.getRpDescription()) + && startDate.matches(resourcePolicy.getStartDate()) + && endDate.matches(resourcePolicy.getEndDate()); + } + + private static Matcher any(Class clazz) { + return LambdaMatcher.matches((obj) -> true, "any value"); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/app/mediafilter/PoiWordFilterTest.java b/dspace-api/src/test/java/org/dspace/app/mediafilter/PoiWordFilterTest.java deleted file mode 100644 index 4d2353a29ab0..000000000000 --- a/dspace-api/src/test/java/org/dspace/app/mediafilter/PoiWordFilterTest.java +++ /dev/null @@ -1,181 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.app.mediafilter; - -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - -import org.dspace.content.Item; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -/** - * Drive the POI-based MS Word filter. - * - * @author mwood - */ -public class PoiWordFilterTest { - - public PoiWordFilterTest() { - } - - @BeforeClass - public static void setUpClass() { - } - - @AfterClass - public static void tearDownClass() { - } - - @Before - public void setUp() { - } - - @After - public void tearDown() { - } - - /** - * Test of getFilteredName method, of class PoiWordFilter. 
- */ -/* - @Test - public void testGetFilteredName() - { - System.out.println("getFilteredName"); - String oldFilename = ""; - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getFilteredName(oldFilename); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getBundleName method, of class PoiWordFilter. - */ -/* - @Test - public void testGetBundleName() - { - System.out.println("getBundleName"); - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getBundleName(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getFormatString method, of class PoiWordFilter. - */ -/* - @Test - public void testGetFormatString() - { - System.out.println("getFormatString"); - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getFormatString(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getDescription method, of class PoiWordFilter. - */ -/* - @Test - public void testGetDescription() - { - System.out.println("getDescription"); - PoiWordFilter instance = new PoiWordFilter(); - String expResult = ""; - String result = instance.getDescription(); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); - } -*/ - - /** - * Test of getDestinationStream method, of class PoiWordFilter. - * Read a constant .doc document and examine the extracted text. - * - * @throws java.lang.Exception passed through. 
- */ - @Test - public void testGetDestinationStreamDoc() - throws Exception { - System.out.println("getDestinationStream"); - Item currentItem = null; - InputStream source; - boolean verbose = false; - PoiWordFilter instance = new PoiWordFilter(); - InputStream result; - - source = getClass().getResourceAsStream("wordtest.doc"); - result = instance.getDestinationStream(currentItem, source, verbose); - assertTrue("Known content was not found", readAll(result).contains("quick brown fox")); - } - - /** - * Test of getDestinationStream method, of class PoiWordFilter. - * Read a constant .docx document and examine the extracted text. - * - * @throws java.lang.Exception passed through. - */ - @Test - public void testGetDestinationStreamDocx() - throws Exception { - System.out.println("getDestinationStream"); - Item currentItem = null; - InputStream source; - boolean verbose = false; - PoiWordFilter instance = new PoiWordFilter(); - InputStream result; - - source = getClass().getResourceAsStream("wordtest.docx"); - result = instance.getDestinationStream(currentItem, source, verbose); - assertTrue("Known content was not found", readAll(result).contains("quick brown fox")); - } - - /** - * Read the entire content of a stream into a String. - * - * @param stream a stream of UTF-8 characters. 
- * @return complete content of {@link stream} - * @throws IOException - */ - private static String readAll(InputStream stream) - throws IOException { - if (null == stream) { - return null; - } - - byte[] bytes = new byte[stream.available()]; - StringBuilder resultSb = new StringBuilder(bytes.length / 2); // Guess: average 2 bytes per character - int howmany; - while ((howmany = stream.read(bytes)) > 0) { - resultSb.append(new String(bytes, 0, howmany, StandardCharsets.UTF_8)); - } - return resultSb.toString(); - } -} diff --git a/dspace-api/src/test/java/org/dspace/app/mediafilter/TikaTextExtractionFilterTest.java b/dspace-api/src/test/java/org/dspace/app/mediafilter/TikaTextExtractionFilterTest.java new file mode 100644 index 000000000000..9db1ef77768b --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/mediafilter/TikaTextExtractionFilterTest.java @@ -0,0 +1,323 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.mediafilter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractUnitTest; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Test; + +/** + * Test the TikaTextExtractionFilter using test files for all major formats. 
+ * The test files used below are all located at [dspace-api]/src/test/resources/org/dspace/app/mediafilter/ + * + * @author mwood + * @author Tim Donohue + */ +public class TikaTextExtractionFilterTest extends AbstractUnitTest { + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + /** + * Test of getDestinationStream method using temp file for text extraction + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithUseTempFile() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + // Extract text from file with "use-temp-file=true" + configurationService.setProperty("textextractor.use-temp-file", "true"); + InputStream source = getClass().getResourceAsStream("test.pdf"); + InputStream result = instance.getDestinationStream(null, source, false); + String tempFileExtractedText = readAll(result); + + // Verify text extracted successfully + assertTrue("Known content was not found in .pdf", tempFileExtractedText.contains("quick brown fox")); + + // Now, extract text from same file using default, in-memory + configurationService.setProperty("textextractor.use-temp-file", "false"); + source = getClass().getResourceAsStream("test.pdf"); + result = instance.getDestinationStream(null, source, false); + String inMemoryExtractedText = readAll(result); + + // Verify the two results are equal + assertEquals("Extracted text via temp file is the same as in-memory.", + inMemoryExtractedText, tempFileExtractedText); + } + + /** + * Test of getDestinationStream method when max characters is less than file size + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithMaxChars() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + // Set "max-chars" to a small value of 100 chars, which is less than the text size of the file. 
+ configurationService.setProperty("textextractor.max-chars", "100"); + InputStream source = getClass().getResourceAsStream("test.pdf"); + InputStream result = instance.getDestinationStream(null, source, false); + String extractedText = readAll(result); + + // Verify we have exactly the first 100 characters + assertEquals(100, extractedText.length()); + // Verify it has some text at the beginning of the file, but NOT text near the end + assertTrue("Known beginning content was found", extractedText.contains("This is a text.")); + assertFalse("Known ending content was not found", extractedText.contains("Emergency Broadcast System")); + } + + /** + * Test of getDestinationStream method using older Microsoft Word document. + * Read a constant .doc document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithDoc() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.doc"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .doc", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using newer Microsoft Word document. + * Read a constant .docx document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithDocx() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.docx"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .docx", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an ODT document + * Read a constant .odt document and examine the extracted text. 
+ * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithODT() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.odt"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .odt", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an RTF document + * Read a constant .rtf document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithRTF() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.rtf"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .rtf", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using a PDF document + * Read a constant .pdf document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithPDF() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.pdf"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .pdf", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an HTML document + * Read a constant .html document and examine the extracted text. + * + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetDestinationStreamWithHTML() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.html"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .html", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using a TXT document + * Read a constant .txt document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithTxt() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.txt"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .txt", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using a CSV document + * Read a constant .csv document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithCsv() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.csv"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .csv", readAll(result).contains("data3,3")); + } + + /** + * Test of getDestinationStream method using an XLS document + * Read a constant .xls document and examine the extracted text. + * + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetDestinationStreamWithXLS() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.xls"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .xls", readAll(result).contains("data3,3")); + } + + /** + * Test of getDestinationStream method using an XLSX document + * Read a constant .xlsx document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithXLSX() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.xlsx"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .xlsx", readAll(result).contains("data3,3")); + } + + /** + * Test of getDestinationStream method using an ODS document + * Read a constant .ods document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithODS() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.ods"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .ods", readAll(result).contains("Data on the second sheet")); + } + + /** + * Test of getDestinationStream method using an PPT document + * Read a constant .ppt document and examine the extracted text. + * + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetDestinationStreamWithPPT() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.ppt"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .ppt", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an PPTX document + * Read a constant .pptx document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithPPTX() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.pptx"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .pptx", readAll(result).contains("quick brown fox")); + } + + /** + * Test of getDestinationStream method using an ODP document + * Read a constant .odp document and examine the extracted text. + * + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetDestinationStreamWithODP() + throws Exception { + TikaTextExtractionFilter instance = new TikaTextExtractionFilter(); + + InputStream source = getClass().getResourceAsStream("test.odp"); + InputStream result = instance.getDestinationStream(null, source, false); + assertTrue("Known content was not found in .odp", readAll(result).contains("quick brown fox")); + } + + /** + * Read the entire content of a stream into a String. + * + * @param stream a stream of UTF-8 characters. 
+ * @return complete content of stream as a String + * @throws IOException + */ + private static String readAll(InputStream stream) + throws IOException { + return IOUtils.toString(stream, StandardCharsets.UTF_8); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java b/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java new file mode 100644 index 000000000000..7d808ab8715c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/packager/PackagerIT.java @@ -0,0 +1,206 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.packager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.util.Iterator; +import java.util.UUID; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; + +import com.google.common.collect.Iterators; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.crosswalk.MetadataValidationException; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.packager.METSManifest; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import 
org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.jdom2.Element; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Basic integration testing for the Packager restore feature + * + * @author Nathan Buckingham + */ +public class PackagerIT extends AbstractIntegrationTestWithDatabase { + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected static final InstallItemService installItemService = ContentServiceFactory.getInstance() + .getInstallItemService(); + protected ConfigurationService configService = DSpaceServicesFactory.getInstance().getConfigurationService(); + protected Community child1; + protected Collection col1; + protected Item article; + File tempFile; + + @Before + public void setup() throws IOException { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 2") + .withEntityType("Publication").build(); + + // Create a new Publication (which is an Article) + article = ItemBuilder.createItem(context, col1) + .withTitle("Article") + .withIssueDate("2017-10-17") + .build(); + + tempFile = File.createTempFile("packagerExportTest", ".zip"); + context.restoreAuthSystemState(); + } + + @After + @Override + public void destroy() throws Exception { + tempFile.delete(); + super.destroy(); + } + + @Test + public 
void packagerExportUUIDTest() throws Exception { + context.turnOffAuthorisationSystem(); + + performExportScript(article.getHandle(), tempFile); + assertTrue(tempFile.length() > 0); + String idStr = getID(); + assertEquals(idStr, article.getID().toString()); + } + + @Test + public void packagerImportUUIDTest() throws Exception { + context.turnOffAuthorisationSystem(); + + //Item + performExportScript(article.getHandle(), tempFile); + String idStr = getID(); + itemService.delete(context, article); + performImportScript(tempFile); + Item item = itemService.find(context, UUID.fromString(idStr)); + assertNotNull(item); + } + + @Test + public void packagerImportColUUIDTest() throws Exception { + context.turnOffAuthorisationSystem(); + configService.setProperty("upload.temp.dir",tempFile.getParent()); + + performExportScript(col1.getHandle(), tempFile); + String idStr = getID(); + collectionService.delete(context, col1); + performImportScript(tempFile); + Collection collection = collectionService.find(context, UUID.fromString(idStr)); + assertNotNull(collection); + } + + @Test + public void packagerImportComUUIDTest() throws Exception { + context.turnOffAuthorisationSystem(); + configService.setProperty("upload.temp.dir",tempFile.getParent()); + + //Community + performExportScript(child1.getHandle(), tempFile); + String idStr = getID(); + communityService.delete(context, child1); + performImportScript(tempFile); + Community community = communityService.find(context, UUID.fromString(idStr)); + assertNotNull(community); + } + + @Test + public void packagerUUIDAlreadyExistTest() throws Exception { + context.turnOffAuthorisationSystem(); + + //Item should be overwritten if UUID already Exists + performExportScript(article.getHandle(), tempFile); + performImportScript(tempFile); + Iterator items = itemService.findByCollection(context, col1); + assertEquals(1, Iterators.size(items)); + } + + @Test + public void packagerUUIDAlreadyExistWithoutForceTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + //should fail to restore the item because the uuid already exists. + performExportScript(article.getHandle(), tempFile); + UUID id = article.getID(); + itemService.delete(context, article); + WorkspaceItem workspaceItem = workspaceItemService.create(context, col1, id, false); + installItemService.installItem(context, workspaceItem, "123456789/0100"); + performImportNoForceScript(tempFile); + Iterator items = itemService.findByCollection(context, col1); + Item testItem = items.next(); + assertFalse(items.hasNext()); //check to make sure there is only 1 item + assertEquals("123456789/0100", testItem.getHandle()); //check to make sure the item wasn't overwritten as + // it would have the old handle. + itemService.delete(context, testItem); + } + + private String getID() throws IOException, MetadataValidationException { + //this method gets the UUID from the mets file thats stored in the attribute element + METSManifest manifest = null; + ZipFile zip = new ZipFile(tempFile); + ZipEntry manifestEntry = zip.getEntry(METSManifest.MANIFEST_FILE); + if (manifestEntry != null) { + // parse the manifest and sanity-check it. 
+ manifest = METSManifest.create(zip.getInputStream(manifestEntry), + false, "AIP"); + } + Element mets = manifest.getMets(); + String idStr = mets.getAttributeValue("ID"); + if (idStr.contains("DB-ID-")) { + idStr = idStr.substring(idStr.lastIndexOf("DB-ID-") + 6, idStr.length()); + } + return idStr; + } + + + private void performExportScript(String handle, File outputFile) throws Exception { + runDSpaceScript("packager", "-d", "-e", "admin@email.com", "-i", handle, "-t", + "AIP", outputFile.getPath()); + } + + private void performImportNoForceScript(File outputFile) throws Exception { + runDSpaceScript("packager", "-r", "-u", "-e", "admin@email.com", "-t", + "AIP", outputFile.getPath()); + } + + private void performImportScript(File outputFile) throws Exception { + runDSpaceScript("packager", "-r", "-f", "-u", "-e", "admin@email.com", "-t", + "AIP", outputFile.getPath()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java new file mode 100644 index 000000000000..37292e91c852 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/CollectionAdministratorsRequestItemStrategyTest.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * + * @author Mark H. 
Wood + */ +public class CollectionAdministratorsRequestItemStrategyTest { + private static final String NAME = "John Q. Public"; + private static final String EMAIL = "jqpublic@example.com"; + + /** + * Test of getRequestItemAuthor method, of class CollectionAdministratorsRequestItemStrategy. + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetRequestItemAuthor() + throws Exception { + System.out.println("getRequestItemAuthor"); + + Context context = Mockito.mock(Context.class); + + EPerson eperson1 = Mockito.mock(EPerson.class); + Mockito.when(eperson1.getEmail()).thenReturn(EMAIL); + Mockito.when(eperson1.getFullName()).thenReturn(NAME); + + Group group1 = Mockito.mock(Group.class); + Mockito.when(group1.getMembers()).thenReturn(List.of(eperson1)); + + Collection collection1 = Mockito.mock(Collection.class); + Mockito.when(collection1.getAdministrators()).thenReturn(group1); + + Item item = Mockito.mock(Item.class); + Mockito.when(item.getOwningCollection()).thenReturn(collection1); + Mockito.when(item.getSubmitter()).thenReturn(eperson1); + + CollectionAdministratorsRequestItemStrategy instance = new CollectionAdministratorsRequestItemStrategy(); + List result = instance.getRequestItemAuthor(context, + item); + assertEquals("Should be one author", 1, result.size()); + assertEquals("Name should match " + NAME, NAME, result.get(0).getFullName()); + assertEquals("Email should match " + EMAIL, EMAIL, result.get(0).getEmail()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java new file mode 100644 index 000000000000..c5475612cb31 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/CombiningRequestItemStrategyTest.java @@ -0,0 +1,53 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of 
the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; + +import java.util.List; + +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * + * @author Mark H. Wood + */ +public class CombiningRequestItemStrategyTest { + /** + * Test of getRequestItemAuthor method, of class CombiningRequestItemStrategy. + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetRequestItemAuthor() + throws Exception { + System.out.println("getRequestItemAuthor"); + Context context = null; + + Item item = Mockito.mock(Item.class); + RequestItemAuthor author1 = new RequestItemAuthor("Pat Paulsen", "ppaulsen@example.com"); + RequestItemAuthor author2 = new RequestItemAuthor("Alfred E. Neuman", "aeneuman@example.com"); + RequestItemAuthor author3 = new RequestItemAuthor("Alias Undercover", "aundercover@example.com"); + + RequestItemAuthorExtractor strategy1 = Mockito.mock(RequestItemHelpdeskStrategy.class); + Mockito.when(strategy1.getRequestItemAuthor(context, item)).thenReturn(List.of(author1)); + + RequestItemAuthorExtractor strategy2 = Mockito.mock(RequestItemMetadataStrategy.class); + Mockito.when(strategy2.getRequestItemAuthor(context, item)).thenReturn(List.of(author2, author3)); + + List strategies = List.of(strategy1, strategy2); + + CombiningRequestItemStrategy instance = new CombiningRequestItemStrategy(strategies); + List result = instance.getRequestItemAuthor(context, + item); + assertThat(result, containsInAnyOrder(author1, author2, author3)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java b/dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java new file mode 100644 index 
000000000000..96cf00c312ba --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/JavaMailTestTransport.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import javax.mail.Address; +import javax.mail.Message; +import javax.mail.MessagingException; +import javax.mail.Session; +import javax.mail.Transport; +import javax.mail.URLName; + +/** + * A dummy load for SMTP transport, which saves the last message "sent" for + * later inspection. See the {@link getMessage()} and {@link getAddresses()} + * methods for access to the message. Sending a new message through an instance + * of this Transport discards the previous message. + * + *

    This class is not thread-safe. + * + * @author mwood + */ +public class JavaMailTestTransport + extends Transport { + private static Message msg; + private static Address[] adrss; + + public JavaMailTestTransport(Session session, URLName urlname) { + super(session, urlname); + } + + @Override + public void sendMessage(Message aMsg, Address[] aAdrss) + throws MessagingException { + msg = aMsg; + adrss = aAdrss; + } + + @Override + public void connect(String host, int port, String user, String password) { } + + /* *** Implementation-specific methods. *** */ + + /** + * Access the most recent saved message. + * + * @return saved message. + */ + public static Message getMessage() { + return msg; + } + + /** + * Access the most recent saved addresses. + * + * @return saved addresses. + */ + public static Address[] getAddresses() { + return adrss; + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java new file mode 100644 index 000000000000..713e007c58a2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemEmailNotifierTest.java @@ -0,0 +1,271 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.junit.Assert.assertEquals; + +import javax.mail.Address; +import javax.mail.Message; +import javax.mail.Provider; +import javax.mail.Session; +import javax.mail.internet.InternetAddress; + +import org.dspace.AbstractUnitTest; +import org.dspace.app.requestitem.factory.RequestItemServiceFactory; +import 
org.dspace.app.requestitem.service.RequestItemService; +import org.dspace.builder.AbstractBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +/** + * Tests for {@link RequestItemEmailNotifier}. + * + * @author mwood + */ +public class RequestItemEmailNotifierTest + extends AbstractUnitTest { + + public static final String TRANSPORT_CLASS_KEY = "mail.smtp.class"; + + private static final String REQUESTOR_ADDRESS = "mhwood@wood.net"; + private static final String REQUESTOR_NAME = "Mark Wood"; + private static final String HELPDESK_ADDRESS = "help@example.com"; + private static final String HELPDESK_NAME = "Help Desk"; + private static final String TEST_MESSAGE = "Message"; + private static final String DUMMY_PROTO = "dummy"; + + private static ConfigurationService configurationService; + private static BitstreamService bitstreamService; + private static HandleService handleService; + private static RequestItemService requestItemService; + + public RequestItemEmailNotifierTest() { + super(); + } + + @BeforeClass + public static void setUpClass() { + AbstractBuilder.init(); // AbstractUnitTest doesn't do this for us. 
+ + configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); + bitstreamService + = ContentServiceFactory.getInstance().getBitstreamService(); + handleService + = HandleServiceFactory.getInstance().getHandleService(); + requestItemService + = RequestItemServiceFactory.getInstance().getRequestItemService(); + } + + /** + * Test of sendRequest method, of class RequestItemEmailNotifier. + * @throws java.lang.Exception passed through. + */ + @Ignore + @Test + public void testSendRequest() throws Exception { + } + + /** + * Test of sendResponse method, of class RequestItemEmailNotifier. + * @throws java.lang.Exception passed through. + */ + @Test + public void testSendResponse() throws Exception { + // Create some content to send. + context.turnOffAuthorisationSystem(); + Community com = CommunityBuilder.createCommunity(context) + .withName("Top Community") + .build(); + Collection col = CollectionBuilder.createCollection(context, com) + .build(); + Item item = ItemBuilder.createItem(context, col) + .withTitle("Test Item") + .build(); + context.restoreAuthSystemState(); + + // Create a request to which we can respond. + RequestItem ri = new RequestItem(); + ri.setAccept_request(true); + ri.setItem(item); + ri.setAllfiles(true); + ri.setReqEmail(REQUESTOR_ADDRESS); + ri.setReqName(REQUESTOR_NAME); + + // Install a fake transport for RFC2822 email addresses. + Session session = DSpaceServicesFactory.getInstance().getEmailService().getSession(); + Provider transportProvider = new Provider(Provider.Type.TRANSPORT, + DUMMY_PROTO, JavaMailTestTransport.class.getCanonicalName(), + "DSpace", "1.0"); + session.addProvider(transportProvider); + session.setProvider(transportProvider); + session.setProtocolForAddress("rfc822", DUMMY_PROTO); + + // Configure the help desk strategy. 
+ configurationService.setProperty("mail.helpdesk", HELPDESK_ADDRESS); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + + // Ensure that mail is "sent". + configurationService.setProperty("mail.server.disabled", "false"); + + // Instantiate and initialize the unit, using the "help desk" strategy. + RequestItemEmailNotifier requestItemEmailNotifier + = new RequestItemEmailNotifier( + DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), + RequestItemAuthorExtractor.class)); + requestItemEmailNotifier.bitstreamService = bitstreamService; + requestItemEmailNotifier.configurationService = configurationService; + requestItemEmailNotifier.handleService = handleService; + requestItemEmailNotifier.requestItemService = requestItemService; + + // Test the unit. Template supplies the Subject: value + requestItemEmailNotifier.sendResponse(context, ri, null, TEST_MESSAGE); + + // Evaluate the test results. + + // Check the To: address. + Address[] myAddresses = JavaMailTestTransport.getAddresses(); + assertEquals("Should have one To: address.", + myAddresses.length, 1); + assertThat("To: should be an Internet address", + myAddresses[0], instanceOf(InternetAddress.class)); + String address = ((InternetAddress)myAddresses[0]).getAddress(); + assertEquals("To: address should match requestor.", + ri.getReqEmail(), address); + + // Check the message body. 
+ Message myMessage = JavaMailTestTransport.getMessage(); + + Object content = myMessage.getContent(); + assertThat("Body should be a single text bodypart", + content, instanceOf(String.class)); + + assertThat("Should contain the helpdesk name", + (String)content, containsString(HELPDESK_NAME)); + + assertThat("Should contain the test custom message", + (String)content, containsString(TEST_MESSAGE)); + } + + /** + * Test of sendResponse method -- rejection case. + * @throws java.lang.Exception passed through. + */ + @Test + public void testSendRejection() + throws Exception { + // Create some content to send. + context.turnOffAuthorisationSystem(); + Community com = CommunityBuilder.createCommunity(context) + .withName("Top Community") + .build(); + Collection col = CollectionBuilder.createCollection(context, com) + .build(); + Item item = ItemBuilder.createItem(context, col) + .withTitle("Test Item") + .build(); + context.restoreAuthSystemState(); + + // Create a request to which we can respond. + RequestItem ri = new RequestItem(); + ri.setAccept_request(false); + ri.setItem(item); + ri.setAllfiles(true); + ri.setReqEmail(REQUESTOR_ADDRESS); + ri.setReqName(REQUESTOR_NAME); + + // Install a fake transport for RFC2822 email addresses. + Session session = DSpaceServicesFactory.getInstance().getEmailService().getSession(); + Provider transportProvider = new Provider(Provider.Type.TRANSPORT, + DUMMY_PROTO, JavaMailTestTransport.class.getCanonicalName(), + "DSpace", "1.0"); + session.addProvider(transportProvider); + session.setProvider(transportProvider); + session.setProtocolForAddress("rfc822", DUMMY_PROTO); + + // Configure the help desk strategy. + configurationService.setProperty("mail.helpdesk", HELPDESK_ADDRESS); + configurationService.setProperty("mail.helpdesk.name", HELPDESK_NAME); + configurationService.setProperty("request.item.helpdesk.override", "true"); + + // Ensure that mail is "sent". 
+ configurationService.setProperty("mail.server.disabled", "false"); + + // Instantiate and initialize the unit, using the "help desk" strategy. + RequestItemEmailNotifier requestItemEmailNotifier + = new RequestItemEmailNotifier( + DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName(RequestItemHelpdeskStrategy.class.getName(), + RequestItemAuthorExtractor.class)); + requestItemEmailNotifier.bitstreamService = bitstreamService; + requestItemEmailNotifier.configurationService = configurationService; + requestItemEmailNotifier.handleService = handleService; + requestItemEmailNotifier.requestItemService = requestItemService; + + // Test the unit. Template supplies the Subject: value + requestItemEmailNotifier.sendResponse(context, ri, null, TEST_MESSAGE); + + // Evaluate the test results. + + // Check the To: address. + Address[] myAddresses = JavaMailTestTransport.getAddresses(); + assertEquals("Should have one To: address.", + myAddresses.length, 1); + assertThat("To: should be an Internet address", + myAddresses[0], instanceOf(InternetAddress.class)); + String address = ((InternetAddress)myAddresses[0]).getAddress(); + assertEquals("To: address should match requestor.", + ri.getReqEmail(), address); + + // Check the message body. + Message myMessage = JavaMailTestTransport.getMessage(); + + Object content = myMessage.getContent(); + assertThat("Body should be a single text bodypart", + content, instanceOf(String.class)); + + assertThat("Should contain the helpdesk name", + (String)content, containsString(HELPDESK_NAME)); + + assertThat("Should contain the test custom message", + (String)content, containsString(TEST_MESSAGE)); + + // FIXME Note that this depends on the content of the rejection template! + assertThat("Should contain the word 'denied'.", + (String)content, containsString("denied")); + } + + /** + * Test of requestOpenAccess method, of class RequestItemEmailNotifier. + * @throws java.lang.Exception passed through. 
+ */ + @Ignore + @Test + public void testRequestOpenAccess() throws Exception { + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategyTest.java new file mode 100644 index 000000000000..b03d7576f991 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemHelpdeskStrategyTest.java @@ -0,0 +1,118 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.junit.Assert.assertEquals; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.AbstractUnitTest; +import org.dspace.builder.AbstractBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +/** + * + * @author mwood + */ +public class RequestItemHelpdeskStrategyTest + extends AbstractUnitTest { + private static final String HELPDESK_ADDRESS = "helpdesk@example.com"; + private static final String AUTHOR_ADDRESS = "john.doe@example.com"; + + private static ConfigurationService configurationService; + private static EPersonService epersonService; + private static 
EPerson johnDoe; + + private Item item; + + @BeforeClass + public static void setUpClass() + throws SQLException { + AbstractBuilder.init(); // AbstractUnitTest doesn't do this for us. + + configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + epersonService = EPersonServiceFactory.getInstance().getEPersonService(); + + Context ctx = new Context(); + ctx.turnOffAuthorisationSystem(); + johnDoe = EPersonBuilder.createEPerson(ctx) + .withEmail(AUTHOR_ADDRESS) + .withNameInMetadata("John", "Doe") + .build(); + ctx.restoreAuthSystemState(); + ctx.complete(); + } + + @AfterClass + public static void tearDownClass() { + AbstractBuilder.destroy(); // AbstractUnitTest doesn't do this for us. + } + + @Before + public void setUp() { + context = new Context(); + context.setCurrentUser(johnDoe); + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection) + .build(); + context.restoreAuthSystemState(); + context.setCurrentUser(null); + } + + /** + * Test of getRequestItemAuthor method, of class RequestItemHelpdeskStrategy. + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetRequestItemAuthor() + throws Exception { + RequestItemHelpdeskStrategy instance = new RequestItemHelpdeskStrategy(); + instance.configurationService = configurationService; + instance.ePersonService = epersonService; + + // Check with help desk enabled. + configurationService.setProperty(RequestItemHelpdeskStrategy.P_HELPDESK_OVERRIDE, "true"); + configurationService.setProperty(RequestItemHelpdeskStrategy.P_MAIL_HELPDESK, HELPDESK_ADDRESS); + List authors = instance.getRequestItemAuthor(context, item); + assertEquals("Wrong author address", HELPDESK_ADDRESS, authors.get(0).getEmail()); + + // Check with help desk disabled. 
+ configurationService.setProperty(RequestItemHelpdeskStrategy.P_HELPDESK_OVERRIDE, "false"); + authors = instance.getRequestItemAuthor(context, item); + assertEquals("Wrong author address", AUTHOR_ADDRESS, authors.get(0).getEmail()); + } + + /** + * Test of getHelpDeskPerson method, of class RequestItemHelpdeskStrategy. + * @throws java.lang.Exception passed through. + */ + @Ignore + @Test + public void testGetHelpDeskPerson() throws Exception { + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemSubmitterStrategyTest.java b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemSubmitterStrategyTest.java new file mode 100644 index 000000000000..f485a591b079 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/requestitem/RequestItemSubmitterStrategyTest.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.requestitem; + +import static org.junit.Assert.assertEquals; + +import java.sql.SQLException; +import java.util.List; + +import org.dspace.AbstractUnitTest; +import org.dspace.builder.AbstractBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * + * @author mwood + */ +public class RequestItemSubmitterStrategyTest + extends AbstractUnitTest { + private static final String AUTHOR_ADDRESS = "john.doe@example.com"; + + private static EPerson johnDoe; + + private Item item; + + 
@BeforeClass + public static void setUpClass() + throws SQLException { + AbstractBuilder.init(); // AbstractUnitTest doesn't do this for us. + + Context ctx = new Context(); + ctx.turnOffAuthorisationSystem(); + johnDoe = EPersonBuilder.createEPerson(ctx) + .withEmail(AUTHOR_ADDRESS) + .withNameInMetadata("John", "Doe") + .build(); + ctx.restoreAuthSystemState(); + ctx.complete(); + } + + @AfterClass + public static void tearDownClass() { + AbstractBuilder.destroy(); // AbstractUnitTest doesn't do this for us. + } + + @Before + public void setUp() { + context = new Context(); + context.setCurrentUser(johnDoe); + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + item = ItemBuilder.createItem(context, collection) + .build(); + context.restoreAuthSystemState(); + context.setCurrentUser(null); + } + + /** + * Test of getRequestItemAuthor method, of class RequestItemSubmitterStrategy. + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testGetRequestItemAuthor() + throws Exception { + RequestItemSubmitterStrategy instance = new RequestItemSubmitterStrategy(); + List author = instance.getRequestItemAuthor(context, item); + assertEquals("Wrong author address", AUTHOR_ADDRESS, author.get(0).getEmail()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java b/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java index aced81cbdfdb..7cc1e8cb45d7 100644 --- a/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java +++ b/dspace-api/src/test/java/org/dspace/app/scripts/handler/impl/TestDSpaceRunnableHandler.java @@ -61,6 +61,12 @@ public void logError(String message) { errorMessages.add(message); } + @Override + public void logError(String message, Throwable throwable) { + super.logError(message, throwable); + errorMessages.add(message); + } + public List getInfoMessages() { return infoMessages; } diff --git a/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java b/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java index d7c4877fa53b..239d2864bfb1 100644 --- a/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java +++ b/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java @@ -11,33 +11,21 @@ import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; +import java.util.Objects; import org.dspace.app.sherpa.v2.SHERPAPublisherResponse; import org.dspace.app.sherpa.v2.SHERPAResponse; /** * Mock implementation for SHERPA API service (used by SHERPA submit service to check - * journal policies) - * This class will return mock SHERPA responses so they can be parsed and turned into external data objects downstream + * journal policies). + * This class will return mock SHERPA responses so they can be parsed and turned + * into external data objects downstream. 
* * @author Kim Shepherd */ public class MockSHERPAService extends SHERPAService { - /** - * Simple overridden 'searchByJournalISSN' so that we do attempt to build the URI but rather than make - * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our - * test resources. - * If URI creation, parsing, or IO fails along the way, a SHERPAResponse with an error message set will be - * returned. - * @param query ISSN string to pass in an "issn equals" API query - * @return SHERPAResponse - */ - @Override - public SHERPAResponse searchByJournalISSN(String query) { - return performRequest("publication", "issn", "equals", query, 0, 1); - } - /** * Simple overridden performRequest so that we do attempt to build the URI but rather than make * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our @@ -66,8 +54,12 @@ public SHERPAResponse performRequest(String type, String field, String predicate return new SHERPAResponse("Error building URI"); } - // Get mock JSON - in this case, a known good result for The Lancet - content = getClass().getResourceAsStream("thelancet.json"); + // Get mock JSON + // if a file with the name contained in the value does not exist, returns thelancet.json + content = getContent(value.concat(".json")); + if (Objects.isNull(content)) { + content = getContent("thelancet.json"); + } // Parse JSON input stream and return response for later evaluation return new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON); @@ -87,6 +79,10 @@ public SHERPAResponse performRequest(String type, String field, String predicate } } + private InputStream getContent(String fileName) { + return getClass().getResourceAsStream(fileName); + } + /** * Simple overridden performPublisherRequest so that we do attempt to build the URI but rather than make * an actual HTTP call, return parsed SHERPAPublisherResponse for PLOS based on known-good JSON stored with our @@ -110,7 
+106,7 @@ public SHERPAPublisherResponse performPublisherRequest(String type, String field try { // Prepare the URI - this will not be used but should be evaluated // in case a syntax exception is thrown - URI uri = prepareQuery(value, endpoint, apiKey); + URI unuseduri = prepareQuery(value, endpoint, apiKey); // Get mock JSON - in this case, a known good result for PLOS content = getClass().getResourceAsStream("plos.json"); @@ -132,4 +128,5 @@ public SHERPAPublisherResponse performPublisherRequest(String type, String field return new SHERPAPublisherResponse(e.getMessage()); } } + } diff --git a/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java b/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java index 1eaa916f56af..438d754aa5f2 100644 --- a/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java @@ -11,7 +11,6 @@ import static org.junit.Assert.assertTrue; import java.sql.SQLException; -import java.util.List; import org.dspace.AbstractUnitTest; import org.dspace.app.sherpa.v2.SHERPAResponse; @@ -109,20 +108,18 @@ public void testGetISSNs() throws AuthorizeException, SQLException { // Get responses from SHERPA submit service, which should inspect item ISSNs and perform search // on the mock SHERPA service - List responses = sherpaSubmitService.searchRelatedJournals(context, testItem); + SHERPAResponse response = sherpaSubmitService.searchRelatedJournals(context, testItem); // Make sure response is not null or empty - assertTrue("Response list should not be null or empty", - responses != null && !responses.isEmpty()); + assertTrue("Response should not be null", response != null); // For each response (there should be only one based on test data) perform the standard set // of thorough parsing tests - for (SHERPAResponse response : responses) { - // Assert response is not error, or fail 
with message - assertFalse("Response was flagged as 'isError'", response.isError()); - // Skip remainder of parsing tests - these are already done in SHERPAServiceTEst - } + // Assert response is not error, or fail with message + assertFalse("Response was flagged as 'isError'", response.isError()); + + // Skip remainder of parsing tests - these are already done in SHERPAServiceTEst } } diff --git a/dspace-api/src/test/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncIT.java b/dspace-api/src/test/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncIT.java new file mode 100644 index 000000000000..4fa881257e0f --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncIT.java @@ -0,0 +1,154 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.solrdatabaseresync; + +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD; +import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import java.util.List; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocumentList; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import 
org.dspace.content.service.CollectionService; +import org.dspace.discovery.MockSolrSearchCore; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class SolrDatabaseResyncIT extends AbstractIntegrationTestWithDatabase { + + private final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); + + private final CollectionService collectionService = + ContentServiceFactory.getInstance().getCollectionService(); + + private MockSolrSearchCore searchService; + + private Collection col; + private Item item1; + private Item item2; + + @Before + public void setUp() throws Exception { + super.setUp(); + configurationService.setProperty("solr-database-resync.time-until-reindex", 1); + + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context).withName("Parent Community").build(); + col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build(); + + item1 = ItemBuilder.createItem(context, col) + .withTitle("Public item 1") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + item2 = ItemBuilder.createItem(context, col) + .withTitle("Public item 2") + .withIssueDate("2011-08-13") + .withAuthor("Smith, Maria") + .withSubject("TestingForMore") + .build(); + + context.setDispatcher("noindex"); + } + + @Test + public void solrPreDBStatusExistingItemTest() throws Exception { + // Items were created, they should contain a predb status in solr + assertHasPreDBStatus(item1); + assertHasPreDBStatus(item2); + + performSolrDatabaseResyncScript(); + + // Database status script 
was performed, their predb status should be removed + assertHasNoPreDBStatus(item1); + assertHasNoPreDBStatus(item2); + + context.restoreAuthSystemState(); + } + + @Test + public void solrPreDBStatusRemovedItemTest() throws Exception { + // Items were created, they should contain a predb status in solr + assertHasPreDBStatus(item1); + assertHasPreDBStatus(item2); + + collectionService.delete(context, col); + + // Items were deleted, they should still contain a predb status in solr for now + assertHasPreDBStatus(item1); + assertHasPreDBStatus(item2); + + performSolrDatabaseResyncScript(); + + // Database status script was performed, their solr document should have been removed + assertNoSolrDocument(item1); + assertNoSolrDocument(item2); + + context.restoreAuthSystemState(); + } + + public void assertHasNoPreDBStatus(Item item) throws Exception { + assertNotEquals(STATUS_FIELD_PREDB, getStatus(item)); + } + + public void assertHasPreDBStatus(Item item) throws Exception { + assertEquals(STATUS_FIELD_PREDB, getStatus(item)); + } + + public void assertNoSolrDocument(Item item) throws Exception { + SolrDocumentList solrDocumentList = getSolrDocumentList(item); + assertEquals(0, solrDocumentList.size()); + } + + public String getStatus(Item item) throws Exception { + SolrDocumentList solrDocumentList = getSolrDocumentList(item); + List fieldValues = ((List) solrDocumentList.get(0).getFieldValues(STATUS_FIELD)); + if (CollectionUtils.isNotEmpty(fieldValues)) { + return (String) fieldValues.get(0); + } else { + return null; + } + } + + public SolrDocumentList getSolrDocumentList(Item item) throws Exception { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.setQuery("search.resourceid:" + item.getID()); + QueryResponse queryResponse = searchService.getSolr().query(solrQuery); + return queryResponse.getResults(); + } + + public void performSolrDatabaseResyncScript() throws Exception { + String[] args = new String[] {"solr-database-resync"}; + TestDSpaceRunnableHandler 
testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher + .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/ConfigurationIT.java b/dspace-api/src/test/java/org/dspace/app/util/ConfigurationIT.java new file mode 100644 index 000000000000..388b467e9799 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/ConfigurationIT.java @@ -0,0 +1,268 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.app.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.collection.IsArrayContainingInAnyOrder.arrayContainingInAnyOrder; +import static org.junit.Assert.assertEquals; + +import org.dspace.AbstractDSpaceTest; +import org.dspace.services.ConfigurationService; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.contrib.java.lang.system.Assertion; +import org.junit.contrib.java.lang.system.ExpectedSystemExit; +import org.junit.contrib.java.lang.system.SystemErrRule; +import org.junit.contrib.java.lang.system.SystemOutRule; + +/** + * Tests for configuration utilities. + * + * Because our command-line tools call System.exit(), we can't expect any code + * (such as assertions) following the call to main() to be executed. Instead we + * set up expectations in advance and attach them to an exit() trapper. 
+ * + * @author mhwood + */ +public class ConfigurationIT + extends AbstractDSpaceTest { + + private static ConfigurationService cfg; + + private static final String SINGLE_PROPERTY = "test.single"; + private static final String SINGLE_VALUE = "value"; + + private static final String ARRAY_PROPERTY = "test.array"; + private static final String[] ARRAY_VALUE = { "one", "two" }; + + private static final String PLACEHOLDER_PROPERTY = "test.substituted"; + private static final String PLACEHOLDER_VALUE = "insert ${test.single} here"; // Keep aligned with SINGLE_NAME + private static final String SUBSTITUTED_VALUE = "insert value here"; // Keep aligned with SINGLE_VALUE + + private static final String MISSING_PROPERTY = "test.missing"; + + /** Capture standard output. */ + @Rule + public final SystemOutRule systemOutRule = new SystemOutRule(); + + /** Capture standard error. */ + @Rule + public final SystemErrRule systemErrRule = new SystemErrRule(); + + /** Capture System.exit() value. */ + @Rule + public final ExpectedSystemExit expectedSystemExit = ExpectedSystemExit.none(); + + /** + * Create some expected properties before all tests. + */ + @BeforeClass + public static void setupSuite() { + cfg = kernelImpl.getConfigurationService(); + + cfg.setProperty(SINGLE_PROPERTY, SINGLE_VALUE); + cfg.setProperty(ARRAY_PROPERTY, ARRAY_VALUE); + cfg.setProperty(PLACEHOLDER_PROPERTY, PLACEHOLDER_VALUE); + cfg.setProperty(MISSING_PROPERTY, null); // Ensure that this one is undefined + } + + /** + * After all tests, remove the properties that were created at entry. + */ + @AfterClass + public static void teardownSuite() { + if (null != cfg) { + cfg.setProperty(SINGLE_PROPERTY, null); + cfg.setProperty(ARRAY_PROPERTY, null); + cfg.setProperty(PLACEHOLDER_PROPERTY, null); + } + } + + /** + * Test fetching all values of a single-valued property. 
+ */ + @Test + public void testMainAllSingle() { + String[] argv; + argv = new String[] { + "--property", SINGLE_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(1)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output[0], equalTo(SINGLE_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of an array property. + */ + @Test + public void testMainAllArray() { + String[] argv; + argv = new String[] { + "--property", ARRAY_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(ARRAY_VALUE.length)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayContainingInAnyOrder(ARRAY_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of a single-valued property containing property + * placeholders. 
+ */ + @Test + public void testMainAllSubstitution() { + String[] argv; + argv = new String[] { + "--property", PLACEHOLDER_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(1)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output[0], equalTo(SUBSTITUTED_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of a single-valued property containing property + * placeholders, suppressing property substitution. + */ + @Test + public void testMainAllRaw() { + // Can it handle a raw property (with substitution placeholders)? + String[] argv; + argv = new String[] { + "--property", PLACEHOLDER_PROPERTY, + "--raw" + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output, arrayWithSize(1)); + } + }); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String[] output = systemOutRule.getLogWithNormalizedLineSeparator() + .split("\n"); + assertThat(output[0], equalTo(PLACEHOLDER_VALUE)); + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching all values of an undefined property. + */ + @Test + public void testMainAllUndefined() { + // Can it handle an undefined property? 
+ String[] argv; + argv = new String[] { + "--property", MISSING_PROPERTY + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(new Assertion() { + @Override public void checkAssertion() { + String outputs = systemOutRule.getLogWithNormalizedLineSeparator(); + String[] output = outputs.split("\n"); + assertThat(output, arrayWithSize(0)); // Huh? Shouldn't split() return { "" } ? + } + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching only the first value of an array property. + */ + @Test + public void testMainFirstArray() { + String[] argv = new String[] { + "--property", ARRAY_PROPERTY, + "--first" + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(() -> { + String outputs = systemOutRule.getLogWithNormalizedLineSeparator(); + String[] output = outputs.split("\n"); + assertThat(output, arrayWithSize(1)); + assertEquals("--first should return first value", output[0], ARRAY_VALUE[0]); + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } + + /** + * Test fetching a single-valued property using {@code --first} + */ + @Test + public void testMainFirstSingle() { + String[] argv = new String[] { + "--property", SINGLE_PROPERTY, + "--first" + }; + expectedSystemExit.expectSystemExitWithStatus(0); + expectedSystemExit.checkAssertionAfterwards(() -> { + String outputs = systemOutRule.getLogWithNormalizedLineSeparator(); + String[] output = outputs.split("\n"); + assertThat(output, arrayWithSize(1)); + assertEquals("--first should return only value", output[0], SINGLE_VALUE); + }); + systemOutRule.enableLog(); + Configuration.main(argv); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java b/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java index 84e776b9835a..78142c925899 100644 --- 
a/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java +++ b/dspace-api/src/test/java/org/dspace/app/util/GoogleBitstreamComparatorTest.java @@ -164,6 +164,12 @@ public void testSameMimeTypeSameSize() throws Exception { toSort.get(1).getName()); assertEquals("Bitstreams have same size and type, so order should remain unchanged", "bitstream3", toSort.get(2).getName()); + + // Also, verify all bitstreams are considered equal (comparison returns 0) + GoogleBitstreamComparator comparator = new GoogleBitstreamComparator(context, settings); + assertEquals(0, comparator.compare(bitstream1, bitstream2)); + assertEquals(0, comparator.compare(bitstream2, bitstream3)); + assertEquals(0, comparator.compare(bitstream3, bitstream1)); } /** diff --git a/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java b/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java index e2b49ab76a56..c2543ca17b8c 100644 --- a/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java +++ b/dspace-api/src/test/java/org/dspace/app/util/GoogleMetadataTest.java @@ -8,18 +8,29 @@ package org.dspace.app.util; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.SQLException; +import java.time.Period; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Date; import java.util.List; +import java.util.Map; import com.google.common.base.Splitter; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; 
import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; @@ -30,6 +41,10 @@ import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BundleService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -39,7 +54,7 @@ public class GoogleMetadataTest extends AbstractUnitTest { /** * log4j category */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(GoogleMetadataTest.class); + private static final Logger log = LogManager.getLogger(); /** * Item instance for the tests @@ -52,6 +67,10 @@ public class GoogleMetadataTest extends AbstractUnitTest { private BitstreamService bitstreamService; + private ResourcePolicyService resourcePolicyService; + + private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + private Community community; /** @@ -80,6 +99,8 @@ public void init() { bundleService = ContentServiceFactory.getInstance().getBundleService(); bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService(); bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + resourcePolicyService = AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + groupService = EPersonServiceFactory.getInstance().getGroupService(); } catch (AuthorizeException ex) { log.error("Authorization Error in init", ex); fail("Authorization Error in init: " + ex.getMessage()); @@ -298,6 +319,7 @@ public void testGetPDFURLWithNoBitstreams() throws Exception { /** * Test empty bitstreams + * @throws java.lang.Exception passed through. 
*/ @Test public void testGetPDFURLWithEmptyBitstreams() throws Exception { @@ -326,6 +348,48 @@ public void testGetPDFURLWithEmptyBitstreams() throws Exception { assertEquals("small", urlSplitted.get(urlSplitted.size() - 1)); } + /** + * Verify there is no mapping for {@link GoogleMetadata#PDF} if there are + * only embargoed (non-publicly accessible bitstream) files. + * @throws java.lang.Exception passed through. + */ + @Test + public void testGetPdfUrlOfEmbargoed() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = ContentServiceFactory.getInstance().getBundleService().create(context, it, "ORIGINAL"); + + Bitstream b = bitstreamService.create( + context, new ByteArrayInputStream("Larger file than primary".getBytes(StandardCharsets.UTF_8))); + b.setName(context, "first"); + b.setFormat(context, bitstreamFormatService.create(context)); + b.getFormat(context).setMIMEType("unknown"); + bundleService.addBitstream(context, bundle, b); + // Set 3 month embargo on pdf + Period period = Period.ofMonths(3); + Date embargoDate = Date.from(ZonedDateTime.now(ZoneOffset.UTC) + .plus(period) + .toInstant()); + Group anonGroup = groupService.findByName(context, Group.ANONYMOUS); + authorizeService.removeAllPolicies(context, b); + resourcePolicyService.removeAllPolicies(context, b); + ResourcePolicy rp = authorizeService.createOrModifyPolicy(null, context, null, anonGroup, + null, embargoDate, Constants.READ, "GoogleMetadataTest", b); + if (rp != null) { + resourcePolicyService.update(context, rp); + } + + GoogleMetadata gm = new GoogleMetadata(this.context, it); + assertTrue(gm.getPDFURL().isEmpty()); + // No value for citation_pdf_url because only one embargoed bitstream + boolean containsPdfUrl = false; + for (Map.Entry mapping: gm.getMappings()) { + if (mapping.getKey().equalsIgnoreCase(gm.PDF)) { + containsPdfUrl = true; + } + } + assertFalse(containsPdfUrl); + } + @After @Override public void destroy() { diff --git 
a/dspace-api/src/test/java/org/dspace/app/util/RegexPatternUtilsTest.java b/dspace-api/src/test/java/org/dspace/app/util/RegexPatternUtilsTest.java new file mode 100644 index 000000000000..30a9100ad4a5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/RegexPatternUtilsTest.java @@ -0,0 +1,214 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import org.dspace.AbstractUnitTest; +import org.junit.Test; + +/** + * Tests for RegexPatternUtils + * + * @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com) + * + */ +public class RegexPatternUtilsTest extends AbstractUnitTest { + + @Test + public void testValidRegexWithFlag() { + final String insensitiveWord = "/[a-z]+/i"; + Pattern computePattern = Pattern.compile(insensitiveWord); + assertNotNull(computePattern); + + Matcher matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("/wrongpattern/i"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + computePattern = RegexPatternUtils.computePattern(insensitiveWord); + 
assertNotNull(computePattern); + + matcher = computePattern.matcher("Hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/wrong-pattern/i"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + } + + @Test + public void testRegexWithoutFlag() { + final String sensitiveWord = "[a-z]+"; + Pattern computePattern = RegexPatternUtils.computePattern(sensitiveWord); + assertNotNull(computePattern); + + Matcher matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("dspace"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("community"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("DSpace"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("/wrongpattern/i"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + + final String sensitiveWordWithDelimiter = "/[a-z]+/"; + computePattern = RegexPatternUtils.computePattern(sensitiveWordWithDelimiter); + assertNotNull(computePattern); + + matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("dspace"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("community"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + matcher = 
computePattern.matcher("DSpace"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Community"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("/wrongpattern/i"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("001"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("?/'`}{][<>.,"); + assertFalse(matcher.matches()); + } + + @Test + public void testWithFuzzyRegex() { + String fuzzyRegex = "/[a-z]+"; + Pattern computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + assertNotNull(computePattern); + + Matcher matcher = computePattern.matcher("/hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + + fuzzyRegex = "[a-z]+/"; + computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + matcher = computePattern.matcher("hello/"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + + // equals to pattern \\[a-z]+\\ -> searching for a word delimited by '\' + fuzzyRegex = "\\\\[a-z]+\\\\"; + computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + // equals to '\hello\' + matcher = computePattern.matcher("\\hello\\"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("/hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + + // equals to pattern /[a-z]+/ -> searching for a string delimited by '/' + fuzzyRegex = "\\/[a-z]+\\/"; + computePattern = RegexPatternUtils.computePattern(fuzzyRegex); + matcher = computePattern.matcher("/hello/"); + 
assertTrue(matcher.matches()); + matcher = computePattern.matcher("/hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("hello"); + assertFalse(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertFalse(matcher.matches()); + } + + @Test + public void testInvalidRegex() { + String invalidSensitive = "[a-z+"; + assertThrows(PatternSyntaxException.class, () -> RegexPatternUtils.computePattern(invalidSensitive)); + + String invalidRange = "a{1-"; + assertThrows(PatternSyntaxException.class, () -> RegexPatternUtils.computePattern(invalidRange)); + + String invalidGroupPattern = "(abc"; + assertThrows(PatternSyntaxException.class, () -> RegexPatternUtils.computePattern(invalidGroupPattern)); + + String emptyPattern = ""; + Pattern computePattern = RegexPatternUtils.computePattern(emptyPattern); + assertNull(computePattern); + + String blankPattern = " "; + computePattern = RegexPatternUtils.computePattern(blankPattern); + assertNull(computePattern); + + String nullPattern = null; + computePattern = RegexPatternUtils.computePattern(nullPattern); + assertNull(computePattern); + } + + @Test + public void testMultiFlagRegex() { + String multilineSensitive = "/[a-z]+/gi"; + Pattern computePattern = RegexPatternUtils.computePattern(multilineSensitive); + assertNotNull(computePattern); + Matcher matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = computePattern.matcher("Hello"); + assertTrue(matcher.matches()); + + multilineSensitive = "/[a-z]+/gim"; + computePattern = RegexPatternUtils.computePattern(multilineSensitive); + assertNotNull(computePattern); + matcher = computePattern.matcher("Hello" + System.lineSeparator() + "Everyone"); + assertTrue(matcher.find()); + assertEquals("Hello", matcher.group()); + assertTrue(matcher.find()); + assertEquals("Everyone", matcher.group()); + + matcher = computePattern.matcher("hello"); + assertTrue(matcher.matches()); + matcher = 
computePattern.matcher("HELLO"); + assertTrue(matcher.matches()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java new file mode 100644 index 000000000000..cb1f828b93c4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.AbstractUnitTest; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Tests for parsing and utilities on submission config forms / readers + * + * @author Kim Shepherd + */ +public class SubmissionConfigTest extends AbstractUnitTest { + + DCInputsReader inputReader; + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() throws DCInputsReaderException { + inputReader = new DCInputsReader(); + } + + @After + public void tearDown() { + inputReader = null; + } + + @Test + public void testReadAndProcessTypeBindSubmissionConfig() + throws SubmissionConfigReaderException, DCInputsReaderException { + // Set up test data. 
This should match the typebind test submission / form config + String typeBindHandle = "123456789/typebind-test"; + String typeBindSubmissionName = "typebindtest"; + String typeBindSubmissionStepName = "typebindtest"; + + // Expected field lists from typebindtest form + List allConfiguredFields = new ArrayList<>(); + allConfiguredFields.add("dc.title"); + allConfiguredFields.add("dc.date.issued"); + allConfiguredFields.add("dc.type"); + allConfiguredFields.add("dc.identifier.isbn"); + List unboundFields = allConfiguredFields.subList(0, 3); + + // Get submission configuration + SubmissionConfig submissionConfig = + SubmissionServiceFactory.getInstance().getSubmissionConfigService() + .getSubmissionConfigByCollection(typeBindHandle); + // Submission name should match name defined in item-submission.xml + assertEquals(typeBindSubmissionName, submissionConfig.getSubmissionName()); + // Step 0 - our process only has one step. It should not be null and have the ID typebindtest + SubmissionStepConfig submissionStepConfig = submissionConfig.getStep(0); + assertNotNull(submissionStepConfig); + assertEquals(typeBindSubmissionStepName, submissionStepConfig.getId()); + // Get inputs and allowed fields + DCInputSet inputConfig = inputReader.getInputsByFormName(submissionStepConfig.getId()); + List allowedFieldsForBook = inputConfig.populateAllowedFieldNames("Book"); + List allowedFieldsForBookChapter = inputConfig.populateAllowedFieldNames("Book chapter"); + List allowedFieldsForArticle = inputConfig.populateAllowedFieldNames("Article"); + List allowedFieldsForNoType = inputConfig.populateAllowedFieldNames(null); + // Book and book chapter should be allowed all 5 fields (each is bound to dc.identifier.isbn) + assertEquals(allConfiguredFields, allowedFieldsForBook); + assertEquals(allConfiguredFields, allowedFieldsForBookChapter); + // Article and type should match a subset of the fields without ISBN + assertEquals(unboundFields, allowedFieldsForArticle); + 
assertEquals(unboundFields, allowedFieldsForNoType); + } +} diff --git a/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java b/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java index 6f73c3abc467..955468f06256 100644 --- a/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java +++ b/dspace-api/src/test/java/org/dspace/authenticate/IPMatcherTest.java @@ -29,7 +29,7 @@ public class IPMatcherTest { private static final String IP6_FULL_ADDRESS2 = "2001:18e8:3:171:218:8bff:fe2a:56a3"; private static final String IP6_MASKED_ADDRESS = "2001:18e8:3::/48"; - private final static int increment = 6; + private final static int increment = 17; private static IPMatcher ip6FullMatcher; private static IPMatcher ip6MaskedMatcher; diff --git a/dspace-api/src/test/java/org/dspace/authority/AuthorityValueTest.java b/dspace-api/src/test/java/org/dspace/authority/AuthorityValueTest.java new file mode 100644 index 000000000000..07c4b65f40f2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/authority/AuthorityValueTest.java @@ -0,0 +1,52 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authority; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.util.Date; + +import org.junit.Test; + +/** + * + * @author mwood + */ +public class AuthorityValueTest { + /** + * Test of stringToDate method, of class AuthorityValue. + */ + @Test + public void testStringToDate() { + Date expected; + Date actual; + + // Test an invalid date. + actual = AuthorityValue.stringToDate("not a date"); + assertNull("Unparseable date should return null", actual); + + // Test a date-time without zone or offset. 
+ expected = Date.from(LocalDateTime.of(1957, 01, 27, 01, 23, 45) + .atZone(ZoneId.systemDefault()) + .toInstant()); + actual = AuthorityValue.stringToDate("1957-01-27T01:23:45"); + assertEquals("Local date-time should convert", expected, actual); + + // Test a date-time with milliseconds and offset from UTC. + expected = Date.from(LocalDateTime.of(1957, 01, 27, 01, 23, 45, 678_000_000) + .atZone(ZoneOffset.of("-05")) + .toInstant()); + actual = AuthorityValue.stringToDate("1957-01-27T01:23:45.678-05"); + assertEquals("Zoned date-time with milliseconds should convert", + expected, actual); + } +} diff --git a/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java b/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java index 2f551315e1b7..562aa86a585e 100644 --- a/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java +++ b/dspace-api/src/test/java/org/dspace/authority/orcid/MockOrcid.java @@ -31,18 +31,21 @@ public void init() { OrcidRestConnector orcidRestConnector = Mockito.mock(OrcidRestConnector.class); when(orcidRestConnector.get(ArgumentMatchers.startsWith("search?"), ArgumentMatchers.any())) .thenAnswer(new Answer() { + @Override public InputStream answer(InvocationOnMock invocation) { return this.getClass().getResourceAsStream("orcid-search-noresults.xml"); } }); when(orcidRestConnector.get(ArgumentMatchers.startsWith("search?q=Bollini"), ArgumentMatchers.any())) .thenAnswer(new Answer() { + @Override public InputStream answer(InvocationOnMock invocation) { return this.getClass().getResourceAsStream("orcid-search.xml"); } }); when(orcidRestConnector.get(ArgumentMatchers.endsWith("/person"), ArgumentMatchers.any())) .thenAnswer(new Answer() { + @Override public InputStream answer(InvocationOnMock invocation) { return this.getClass().getResourceAsStream("orcid-person-record.xml"); } diff --git a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java 
b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java index 46435ec8f156..70eaa2a0b909 100644 --- a/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/authorize/AuthorizeServiceTest.java @@ -27,7 +27,7 @@ import org.junit.Test; /** - * Created by pbecker as he wanted to write a test against DS-3572. + * Created by pbecker to write a test against DS-3572. * This definitely needs to be extended, but it's at least a start. */ public class AuthorizeServiceTest extends AbstractUnitTest { @@ -80,7 +80,7 @@ public void testauthorizeMethodDoesNotConfuseEPersonWithCurrentUser() { } try { - // eperson1 should be able to write as he is member of a group that has write permissions + // eperson1 should be able to write as it is a member of a group that has write permissions Assert.assertTrue(authorizeService.authorizeActionBoolean(context, eperson1, dso, Constants.WRITE, true)); // person2 shouldn't have write access Assert.assertFalse(authorizeService.authorizeActionBoolean(context, eperson2, dso, Constants.WRITE, true)); diff --git a/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java new file mode 100644 index 000000000000..7286fb8e8374 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/authorize/RegexPasswordValidatorIT.java @@ -0,0 +1,84 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.authorize; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.when; + +import org.dspace.AbstractIntegrationTest; +import org.dspace.services.ConfigurationService; +import org.junit.Before; +import org.junit.Test; +import 
org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * Unit tests for {@link RegexPasswordValidator}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +@RunWith(MockitoJUnitRunner.class) +public class RegexPasswordValidatorIT extends AbstractIntegrationTest { + + @Mock + private ConfigurationService configurationService; + + @InjectMocks + private RegexPasswordValidator regexPasswordValidator; + + @Before + public void setup() { + when(configurationService.getProperty("authentication-password.regex-validation.pattern")) + .thenReturn("^(?=.*[a-z])(?=.*[A-Z])(?=.*\\d)(?=.*[^\\da-zA-Z]).{8,15}$"); + } + + @Test + public void testValidPassword() { + assertThat(regexPasswordValidator.isPasswordValid("TestPassword01!"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingSpecialCharacter() { + assertThat(regexPasswordValidator.isPasswordValid("TestPassword01"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("TestPassword01?"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingNumber() { + assertThat(regexPasswordValidator.isPasswordValid("TestPassword!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("TestPassword1!"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingUppercaseCharacter() { + assertThat(regexPasswordValidator.isPasswordValid("testpassword01!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("testPassword01!"), is(true)); + } + + @Test + public void testInvalidPasswordForMissingLowercaseCharacter() { + assertThat(regexPasswordValidator.isPasswordValid("TESTPASSWORD01!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("TESTPASSWORd01!"), is(true)); + } + + @Test + public void testInvalidPasswordForTooShortValue() { + assertThat(regexPasswordValidator.isPasswordValid("Test01!"), is(false)); + 
assertThat(regexPasswordValidator.isPasswordValid("Test012!"), is(true)); + } + + @Test + public void testInvalidPasswordForTooLongValue() { + assertThat(regexPasswordValidator.isPasswordValid("ThisIsAVeryLongPassword01!"), is(false)); + assertThat(regexPasswordValidator.isPasswordValid("IsAPassword012!"), is(true)); + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java b/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java new file mode 100644 index 000000000000..83aab72d904e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/browse/CrossLinksTest.java @@ -0,0 +1,103 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.browse; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.dspace.AbstractDSpaceTest; +import org.dspace.services.ConfigurationService; +import org.dspace.utils.DSpace; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CrossLinks} + */ +public class CrossLinksTest extends AbstractDSpaceTest { + protected ConfigurationService configurationService; + + + @Before + public void setUp() { + configurationService = new DSpace().getConfigurationService(); + } + + @Test + public void testFindLinkType_Null() throws Exception { + CrossLinks crossLinks = new CrossLinks(); + assertNull(crossLinks.findLinkType(null)); + } + + @Test + public void testFindLinkType_NoMatch() throws Exception { + CrossLinks crossLinks = new CrossLinks(); + String metadataField = "foo.bar.baz.does.not.exist"; + assertNull(crossLinks.findLinkType(metadataField)); + } + + @Test + public void testFindLinkType_WildcardMatch() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + 
CrossLinks crossLinks = new CrossLinks(); + + String metadataField = "dc.contributor.author"; + assertEquals("author",crossLinks.findLinkType(metadataField)); + } + + @Test + public void testFindLinkType_SingleExactMatch_Author() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("type",crossLinks.findLinkType("dc.genre")); + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + } + + @Test + public void testFindLinkType_SingleExactMatch_Type() throws Exception { + configurationService.setProperty("webui.browse.link.1", "type:dc.genre"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("type",crossLinks.findLinkType("dc.genre")); + } + + @Test + public void testFindLinkType_MultipleExactMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.author"); + configurationService.setProperty("webui.browse.link.2", "type:dc.genre"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("type",crossLinks.findLinkType("dc.genre")); + } + + @Test + public void testFindLinkType_MultipleWildcardMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("subject",crossLinks.findLinkType("dc.subject.lcsh")); + } + + @Test + public void testFindLinkType_MultiplExactAndWildcardMatches_DifferentIndexes() throws Exception { + configurationService.setProperty("webui.browse.link.1", "author:dc.contributor.*"); + configurationService.setProperty("webui.browse.link.2", "subject:dc.subject.*"); + 
configurationService.setProperty("webui.browse.link.3", "type:dc.genre"); + configurationService.setProperty("webui.browse.link.4", "dateissued:dc.date.issued"); + CrossLinks crossLinks = new CrossLinks(); + + assertEquals("author",crossLinks.findLinkType("dc.contributor.author")); + assertEquals("subject",crossLinks.findLinkType("dc.subject.lcsh")); + assertEquals("type",crossLinks.findLinkType("dc.genre")); + assertEquals("dateissued",crossLinks.findLinkType("dc.date.issued")); + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java index 1f8a9e4a5bea..775dfaabe20f 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractBuilder.java @@ -11,7 +11,12 @@ import java.util.List; import org.apache.commons.collections4.CollectionUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.dspace.alerts.service.SystemWideAlertService; +import org.dspace.app.requestitem.factory.RequestItemServiceFactory; +import org.dspace.app.requestitem.service.RequestItemService; +import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.service.AuthorizeService; @@ -39,11 +44,23 @@ import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.RegistrationDataService; +import org.dspace.eperson.service.SubscribeService; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidHistoryService; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.OrcidTokenService; +import org.dspace.qaevent.service.QAEventService; import org.dspace.scripts.factory.ScriptServiceFactory; import 
org.dspace.scripts.service.ProcessService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.submit.factory.SubmissionServiceFactory; +import org.dspace.submit.service.SubmissionConfigService; +import org.dspace.supervision.factory.SupervisionOrderServiceFactory; +import org.dspace.supervision.service.SupervisionOrderService; +import org.dspace.utils.DSpace; import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.service.VersionHistoryService; +import org.dspace.versioning.service.VersioningService; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.service.XmlWorkflowService; import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; @@ -89,6 +106,16 @@ public abstract class AbstractBuilder { static RelationshipTypeService relationshipTypeService; static EntityTypeService entityTypeService; static ProcessService processService; + static RequestItemService requestItemService; + static VersioningService versioningService; + static OrcidHistoryService orcidHistoryService; + static OrcidQueueService orcidQueueService; + static OrcidTokenService orcidTokenService; + static SystemWideAlertService systemWideAlertService; + static SubmissionConfigService submissionConfigService; + static SubscribeService subscribeService; + static SupervisionOrderService supervisionOrderService; + static QAEventService qaEventService; protected Context context; @@ -101,7 +128,7 @@ public abstract class AbstractBuilder { /** * log4j category */ - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AbstractDSpaceObjectBuilder.class); + private static final Logger log = LogManager.getLogger(); protected AbstractBuilder(Context context) { this.context = context; @@ -136,12 +163,28 @@ public static void init() { relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService(); entityTypeService = 
ContentServiceFactory.getInstance().getEntityTypeService(); processService = ScriptServiceFactory.getInstance().getProcessService(); + requestItemService = RequestItemServiceFactory.getInstance().getRequestItemService(); + versioningService = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(VersioningService.class.getName(), VersioningService.class); // Temporarily disabled claimedTaskService = XmlWorkflowServiceFactory.getInstance().getClaimedTaskService(); inProgressUserService = XmlWorkflowServiceFactory.getInstance().getInProgressUserService(); poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService(); workflowItemRoleService = XmlWorkflowServiceFactory.getInstance().getWorkflowItemRoleService(); + orcidHistoryService = OrcidServiceFactory.getInstance().getOrcidHistoryService(); + orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + orcidTokenService = OrcidServiceFactory.getInstance().getOrcidTokenService(); + systemWideAlertService = DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(SystemWideAlertService.class).get(0); + try { + submissionConfigService = SubmissionServiceFactory.getInstance().getSubmissionConfigService(); + } catch (SubmissionConfigReaderException e) { + log.error(e.getMessage(), e); + } + subscribeService = ContentServiceFactory.getInstance().getSubscribeService(); + supervisionOrderService = SupervisionOrderServiceFactory.getInstance().getSupervisionOrderService(); + qaEventService = new DSpace().getSingletonService(QAEventService.class); } @@ -172,6 +215,14 @@ public static void destroy() { relationshipTypeService = null; entityTypeService = null; processService = null; + requestItemService = null; + versioningService = null; + orcidTokenService = null; + systemWideAlertService = null; + submissionConfigService = null; + subscribeService = null; + supervisionOrderService = null; + qaEventService = null; } diff --git 
a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index a2a8aa9d4278..e7ebd8768e7d 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -8,6 +8,10 @@ package org.dspace.builder; import java.sql.SQLException; +import java.time.Instant; +import java.time.LocalDate; +import java.time.Period; +import java.time.ZoneId; import java.util.Date; import org.apache.logging.log4j.Logger; @@ -20,17 +24,13 @@ import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.MutablePeriod; -import org.joda.time.format.PeriodFormat; -import org.joda.time.format.PeriodFormatter; /** * Abstract builder to construct DSpace Objects * * @author Tom Desair (tom dot desair at atmire dot com) * @author Raf Ponsaerts (raf dot ponsaerts at atmire dot com) + * @param concrete type of DSpaceObject */ public abstract class AbstractDSpaceObjectBuilder extends AbstractBuilder { @@ -63,7 +63,7 @@ protected > B addMetadataValue(final T final String qualifier, final String value) { try { - getService().addMetadata(context, dso, schema, element, qualifier, Item.ANY, value); + getService().addMetadata(context, dso, schema, element, qualifier, null, value); } catch (Exception e) { return handleException(e); } @@ -112,21 +112,27 @@ protected > B setMetadataSingleValue(fi } /** - * Support method to grant the {@link Constants#READ} permission over an object only to the {@link Group#ANONYMOUS} - * after the specified embargoPeriod. Any other READ permissions will be removed + * Support method to grant the {@link Constants#READ} permission over an + * object only to the {@link Group#ANONYMOUS} after the specified + * embargoPeriod. 
Any other READ permissions will be removed. * + * @param type of this Builder. * @param embargoPeriod - * the embargo period after which the READ permission will be active. It is parsed using the - * {@link PeriodFormatter#parseMutablePeriod(String)} method of the joda library - * @param dso - * the DSpaceObject on which grant the permission - * @return the builder properly configured to retain read permission on the object only for the specified group + * the embargo period after which the READ permission will be + * active. + * @param dso the DSpaceObject on which to grant the permission. + * @return the builder properly configured to retain read permission on the + * object only for the specified group. */ - protected > B setEmbargo(String embargoPeriod, DSpaceObject dso) { + protected > B setEmbargo(Period embargoPeriod, DSpaceObject dso) { // add policy just for anonymous try { - MutablePeriod period = PeriodFormat.getDefault().parseMutablePeriod(embargoPeriod); - Date embargoDate = DateTime.now(DateTimeZone.UTC).plus(period).toDate(); + Instant embargoInstant = LocalDate.now() + .plus(embargoPeriod) + .atStartOfDay() + .atZone(ZoneId.systemDefault()) + .toInstant(); + Date embargoDate = Date.from(embargoInstant); return setOnlyReadPermission(dso, groupService.findByName(context, Group.ANONYMOUS), embargoDate); } catch (Exception e) { @@ -135,14 +141,19 @@ protected > B setEmbargo(String embargo } /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#READ} permission over an + * object only to a specific group.Any other READ permissions will be + * removed. * + * @param type of this Builder. 
* @param dso * the DSpaceObject on which grant the permission * @param group * the EPersonGroup that will be granted of the permission - * @return the builder properly configured to retain read permission on the object only for the specified group + * @param startDate + * the date on which access begins. + * @return the builder properly configured to retain read permission on the + * object only for the specified group. */ protected > B setOnlyReadPermission(DSpaceObject dso, Group group, Date startDate) { @@ -161,15 +172,20 @@ protected > B setOnlyReadPermission(DSp } return (B) this; } + /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#READ} permission over an + * object only to a specific EPerson. Any other READ permissions will be + * removed. * + * @param type of this Builder. * @param dso * the DSpaceObject on which grant the permission * @param eperson - * the eperson that will be granted of the permission - * @return the builder properly configured to build the object with the additional admin permission + * the EPerson that will be granted of the permission + * @param startDate the date on which access begins. + * @return the builder properly configured to build the object with the + * additional admin permission. */ protected > B setAdminPermission(DSpaceObject dso, EPerson eperson, Date startDate) { @@ -191,6 +207,7 @@ protected > B setAdminPermission(DSpace /** * Support method to grant {@link Constants#REMOVE} permission to a specific eperson * + * @param type of this Builder. * @param dso * the DSpaceObject on which grant the permission * @param eperson @@ -220,6 +237,7 @@ protected > B setRemovePermissionForEpe /** * Support method to grant {@link Constants#ADD} permission to a specific eperson * + * @param type of this Builder. 
* @param dso * the DSpaceObject on which grant the permission * @param eperson @@ -249,6 +267,7 @@ protected > B setAddPermissionForEperso /** * Support method to grant {@link Constants#WRITE} permission to a specific eperson * + * @param type of this Builder. * @param dso * the DSpaceObject on which grant the permission * @param eperson diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java index b8942a17d0c0..08045325b8a5 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamBuilder.java @@ -10,6 +10,7 @@ import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.time.Period; import java.util.List; import org.dspace.authorize.AuthorizeException; @@ -17,7 +18,12 @@ import org.dspace.content.BitstreamFormat; import org.dspace.content.Bundle; import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; +import org.dspace.content.service.MetadataValueService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.eperson.Group; @@ -26,8 +32,6 @@ */ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder { - public static final String ORIGINAL = "ORIGINAL"; - private Bitstream bitstream; private Item item; private Group readerGroup; @@ -49,6 +53,19 @@ public static BitstreamBuilder createBitstream(Context context, Bundle bundle, I return builder.create(context, bundle, is); } + public static BitstreamBuilder createBitstream(Context context, Item item, InputStream is, String bundleName) + throws SQLException, AuthorizeException, IOException { + BitstreamBuilder builder = new BitstreamBuilder(context); + return builder.createInRequestedBundle(context, 
item, is, bundleName); + } + + public static BitstreamBuilder createBitstream(Context context, Item item, InputStream is, + String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException, IOException { + BitstreamBuilder builder = new BitstreamBuilder(context); + return builder.createInRequestedBundleWithIiifDisabled(context, item, is, bundleName, iiifEnabled); + } + private BitstreamBuilder create(Context context, Item item, InputStream is) throws SQLException, AuthorizeException, IOException { this.context = context; @@ -70,6 +87,66 @@ private BitstreamBuilder create(Context context, Bundle bundle, InputStream is) return this; } + private BitstreamBuilder createInRequestedBundle(Context context, Item item, InputStream is, String bundleName) + throws SQLException, AuthorizeException, IOException { + this.context = context; + this.item = item; + + Bundle bundle = getBundleByName(item, bundleName); + + bitstream = bitstreamService.create(context, bundle, is); + + return this; + } + + private BitstreamBuilder createInRequestedBundleWithIiifDisabled(Context context, Item item, InputStream is, + String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException, IOException { + this.context = context; + this.item = item; + + Bundle bundle = getBundleByNameAndIiiEnabled(item, bundleName, iiifEnabled); + + bitstream = bitstreamService.create(context, bundle, is); + + return this; + } + + private Bundle getBundleByNameAndIiiEnabled(Item item, String bundleName, boolean iiifEnabled) + throws SQLException, AuthorizeException { + List bundles = itemService.getBundles(item, bundleName); + Bundle targetBundle = null; + + if (bundles.size() < 1) { + // not found, create a new one + targetBundle = bundleService.create(context, item, bundleName); + MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); + MetadataField iiifEnabledField = metadataFieldService. 
+ findByString(context, "dspace.iiif.enabled", '.'); + MetadataValue metadataValue = metadataValueService.create(context, targetBundle, iiifEnabledField); + metadataValue.setValue(String.valueOf(iiifEnabled)); + + } else { + // put bitstreams into first bundle + targetBundle = bundles.iterator().next(); + } + return targetBundle; + } + + + private Bundle getBundleByName(Item item, String bundleName) throws SQLException, AuthorizeException { + List bundles = itemService.getBundles(item, bundleName); + Bundle targetBundle = null; + + if (bundles.size() < 1) { + // not found, create a new one + targetBundle = bundleService.create(context, item, bundleName); + } else { + // put bitstreams into first bundle + targetBundle = bundles.iterator().next(); + } + return targetBundle; + } public BitstreamBuilder withName(String name) throws SQLException { bitstream.setName(context, name); @@ -105,13 +182,39 @@ public BitstreamBuilder withProvenance(String provenance) throws SQLException { return this; } + + public BitstreamBuilder withIIIFDisabled() throws SQLException { + bitstreamService.addMetadata(context, bitstream, "dspace", "iiif", "enabled", null, "false"); + return this; + } + + public BitstreamBuilder withIIIFLabel(String label) throws SQLException { + bitstreamService.addMetadata(context, bitstream, "iiif", "label", null, null, label); + return this; + } + + public BitstreamBuilder withIIIFCanvasWidth(int i) throws SQLException { + bitstreamService.addMetadata(context, bitstream, "iiif", "image", "width", null, String.valueOf(i)); + return this; + } + + public BitstreamBuilder withIIIFCanvasHeight(int i) throws SQLException { + bitstreamService.addMetadata(context, bitstream, "iiif", "image", "height", null, String.valueOf(i)); + return this; + } + + public BitstreamBuilder withIIIFToC(String toc) throws SQLException { + bitstreamService.addMetadata(context, bitstream, "iiif", "toc", null, null, toc); + return this; + } + private Bundle getOriginalBundle(Item item) 
throws SQLException, AuthorizeException { - List bundles = itemService.getBundles(item, ORIGINAL); + List bundles = itemService.getBundles(item, Constants.CONTENT_BUNDLE_NAME); Bundle targetBundle = null; if (bundles.size() < 1) { // not found, create a new one - targetBundle = bundleService.create(context, item, ORIGINAL); + targetBundle = bundleService.create(context, item, Constants.CONTENT_BUNDLE_NAME); } else { // put bitstreams into first bundle targetBundle = bundles.iterator().next(); @@ -120,7 +223,7 @@ private Bundle getOriginalBundle(Item item) throws SQLException, AuthorizeExcept return targetBundle; } - public BitstreamBuilder withEmbargoPeriod(String embargoPeriod) { + public BitstreamBuilder withEmbargoPeriod(Period embargoPeriod) { return setEmbargo(embargoPeriod, bitstream); } @@ -154,6 +257,7 @@ public Bitstream build() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup bitstream = c.reloadEntity(bitstream); @@ -168,4 +272,5 @@ public void cleanup() throws Exception { protected DSpaceObjectService getService() { return bitstreamService; } -} \ No newline at end of file + +} diff --git a/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java index 1051712326c6..a13783ceef84 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BitstreamFormatBuilder.java @@ -34,6 +34,7 @@ protected BitstreamFormatBuilder(Context context) { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup bitstreamFormat = 
c.reloadEntity(bitstreamFormat); diff --git a/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java b/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java index 614cd54c6df4..1776921ac652 100644 --- a/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/BundleBuilder.java @@ -55,6 +55,7 @@ public BundleBuilder withBitstream(Bitstream bitstream) { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup bundle = c.reloadEntity(bundle); diff --git a/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java index 63c03c4a919c..aed712f2d2b9 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ClaimedTaskBuilder.java @@ -124,6 +124,7 @@ private void deleteWsi(Context c, WorkspaceItem dso) throws Exception { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup workspaceItem = c.reloadEntity(workspaceItem); diff --git a/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java index eece873380e1..f287c7aa8d32 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CollectionBuilder.java @@ -7,6 +7,8 @@ */ package org.dspace.builder; +import static org.dspace.core.Constants.DEFAULT_ITEM_READ; + import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -15,6 +17,7 @@ import 
org.apache.commons.io.IOUtils; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.MetadataSchemaEnum; @@ -100,6 +103,10 @@ public CollectionBuilder withName(final String name) { return setMetadataSingleValue(collection, MetadataSchemaEnum.DC.getName(), "title", null, name); } + public CollectionBuilder withEntityType(final String entityType) { + return setMetadataSingleValue(collection, "dspace", "entity", "type", entityType); + } + /** * Set the name of the Collection in the given language. * @@ -233,6 +240,28 @@ public CollectionBuilder withAdminGroup(EPerson... members) throws SQLException, return this; } + /** + * remove the resource policies with type DEFAULT_ITEM_READ and + * add new policy with type DEFAULT_ITEM_READ of + * the new group to current collection. + * + * @param group the group + * @return this builder + * @throws SQLException passed through. + * @throws AuthorizeException passed through. 
+ */ + public CollectionBuilder withDefaultItemRead(Group group) throws SQLException, AuthorizeException { + resourcePolicyService.removePolicies(context, collection, DEFAULT_ITEM_READ); + + ResourcePolicy resourcePolicy = resourcePolicyService.create(context); + resourcePolicy.setGroup(group); + resourcePolicy.setAction(DEFAULT_ITEM_READ); + resourcePolicy.setdSpaceObject(collection); + resourcePolicyService.update(context, resourcePolicy); + return this; + } + + @Override public Collection build() { try { @@ -249,6 +278,7 @@ public Collection build() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup collection = c.reloadEntity(collection); diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index 5ba36af8f4a3..dfacd0cec3d1 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { private Community community; + protected CommunityBuilder(Context context) { super(context); } public static CommunityBuilder createCommunity(final Context context) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.create(); + return builder.create(null); + } + public static CommunityBuilder createCommunity(final Context context, String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.create(handle); } - private CommunityBuilder create() { - return createSubCommunity(context, null); + private CommunityBuilder create(String handle) { + return createSubCommunity(context, null, handle); } public static CommunityBuilder createSubCommunity(final Context context, 
final Community parent) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.createSub(parent); + return builder.createSub(parent, null); } - private CommunityBuilder createSub(final Community parent) { + public static CommunityBuilder createSubCommunity(final Context context, final Community parent, + final String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.createSub(parent, handle); + } + + private CommunityBuilder createSub(final Community parent, String handle) { try { - community = communityService.create(parent, context); + community = communityService.create(parent, context, handle); } catch (Exception e) { e.printStackTrace(); return null; @@ -102,6 +113,7 @@ public CommunityBuilder addParentCommunity(final Context context, final Communit @Override public Community build() { try { + communityService.update(context, community); context.dispatchEvents(); @@ -116,6 +128,7 @@ public Community build() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup community = c.reloadEntity(community); diff --git a/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java b/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java index 2010aef2c124..a28462eea74c 100644 --- a/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/EPersonBuilder.java @@ -32,6 +32,7 @@ protected EPersonBuilder(Context context) { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup ePerson = c.reloadEntity(ePerson); @@ -128,6 +129,16 @@ public EPersonBuilder withCanLogin(final 
boolean canLogin) { return this; } + public EPersonBuilder withOrcid(final String orcid) { + setMetadataSingleValue(ePerson, "eperson", "orcid", null, orcid); + return this; + } + + public EPersonBuilder withOrcidScope(final String scope) { + addMetadataValue(ePerson, "eperson", "orcid", "scope", scope); + return this; + } + public static void deleteEPerson(UUID uuid) throws SQLException, IOException { try (Context c = new Context()) { c.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java b/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java index ef3c840bc2d3..36d9654adf0d 100644 --- a/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/EntityTypeBuilder.java @@ -36,6 +36,7 @@ protected EntityTypeService getService() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup entityType = c.reloadEntity(entityType); diff --git a/dspace-api/src/test/java/org/dspace/builder/GroupBuilder.java b/dspace-api/src/test/java/org/dspace/builder/GroupBuilder.java index cbee3e57a189..b3447dd8bd9a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/GroupBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/GroupBuilder.java @@ -35,6 +35,7 @@ protected GroupBuilder(Context context) { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup group = c.reloadEntity(group); diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index c4bdbe7d548f..f4f504e60fad 100644 --- 
a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -7,20 +7,29 @@ */ package org.dspace.builder; +import static org.dspace.content.LicenseUtils.getLicenseText; +import static org.dspace.content.MetadataSchemaEnum.DC; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; + import java.io.IOException; import java.sql.SQLException; +import java.time.Period; import java.util.UUID; import org.dspace.authorize.AuthorizeException; import org.dspace.content.Collection; import org.dspace.content.DCDate; import org.dspace.content.Item; +import org.dspace.content.LicenseUtils; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.WorkspaceItem; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; +import org.dspace.profile.OrcidEntitySyncPreference; +import org.dspace.profile.OrcidProfileSyncPreference; +import org.dspace.profile.OrcidSynchronizationMode; /** * Builder to construct Item objects @@ -31,6 +40,7 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { private boolean withdrawn = false; + private String handle = null; private WorkspaceItem workspaceItem; private Item item; private Group readerGroup = null; @@ -48,7 +58,7 @@ private ItemBuilder create(final Context context, final Collection col) { this.context = context; try { - workspaceItem = workspaceItemService.create(context, col, false); + workspaceItem = workspaceItemService.create(context, col, true); item = workspaceItem.getItem(); } catch (Exception e) { return handleException(e); @@ -73,11 +83,48 @@ public ItemBuilder withIdentifierOther(final String identifierOther) { public ItemBuilder withAuthor(final String authorName) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", authorName); } + public ItemBuilder withAuthor(final String authorName, 
final String authority, final int confidence) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", null, authorName, authority, confidence); } + public ItemBuilder withEditor(final String editorName) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "editor", editorName); + } + + public ItemBuilder withDescriptionAbstract(String description) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "description", "abstract", description); + } + + public ItemBuilder withLanguage(String language) { + return addMetadataValue(item, "dc", "language", "iso", language); + } + + public ItemBuilder withIsPartOf(String isPartOf) { + return addMetadataValue(item, "dc", "relation", "ispartof", isPartOf); + } + + public ItemBuilder withDoiIdentifier(String doi) { + return addMetadataValue(item, "dc", "identifier", "doi", doi); + } + + public ItemBuilder withScopusIdentifier(String scopus) { + return addMetadataValue(item, "dc", "identifier", "scopus", scopus); + } + + public ItemBuilder withRelationFunding(String funding) { + return addMetadataValue(item, "dc", "relation", "funding", funding); + } + + public ItemBuilder withRelationFunding(String funding, String authority) { + return addMetadataValue(item, DC.getName(), "relation", "funding", null, funding, authority, 600); + } + + public ItemBuilder withRelationGrantno(String grantno) { + return addMetadataValue(item, "dc", "relation", "grantno", grantno); + } + public ItemBuilder withPersonIdentifierFirstName(final String personIdentifierFirstName) { return addMetadataValue(item, "person", "givenName", null, personIdentifierFirstName); } @@ -95,10 +142,6 @@ public ItemBuilder withSubject(final String subject, final String authority, fin subject, authority, confidence); } - public ItemBuilder withEntityType(final String entityType) { - return addMetadataValue(item, "dspace", "entity", "type", entityType); - } - public ItemBuilder withType(final 
String type) { return addMetadataValue(item, "dc", "type", null, type); } @@ -115,19 +158,132 @@ public ItemBuilder withProvenanceData(final String provenanceData) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "description", "provenance", provenanceData); } + public ItemBuilder enableIIIF() { + return addMetadataValue(item, "dspace", "iiif", "enabled", "true"); + } + + public ItemBuilder disableIIIF() { + return addMetadataValue(item, "dspace", "iiif", "enabled", "false"); + } + + public ItemBuilder enableIIIFSearch() { + return addMetadataValue(item, "iiif", "search", "enabled", "true"); + } + + public ItemBuilder withIIIFViewingHint(String hint) { + return addMetadataValue(item, "iiif", "viewing", "hint", hint); + } + + public ItemBuilder withIIIFCanvasNaming(String naming) { + return addMetadataValue(item, "iiif", "canvas", "naming", naming); + } + + public ItemBuilder withIIIFCanvasWidth(int i) { + return addMetadataValue(item, "iiif", "image", "width", String.valueOf(i)); + } + + public ItemBuilder withIIIFCanvasHeight(int i) { + return addMetadataValue(item, "iiif", "image", "height", String.valueOf(i)); + } + public ItemBuilder withMetadata(final String schema, final String element, final String qualifier, final String value) { return addMetadataValue(item, schema, element, qualifier, value); } + public ItemBuilder withDspaceObjectOwner(String value, String authority) { + return addMetadataValue(item, "dspace", "object", "owner", null, value, authority, CF_ACCEPTED); + } + + public ItemBuilder withOrcidIdentifier(String orcid) { + return addMetadataValue(item, "person", "identifier", "orcid", orcid); + } + + public ItemBuilder withOrcidAccessToken(String accessToken, EPerson owner) { + + try { + + OrcidTokenBuilder.create(context, owner, accessToken) + .withProfileItem(item) + .build(); + + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e); + } + + return this; + + } + + public ItemBuilder 
withOrcidAuthenticated(String authenticated) { + return addMetadataValue(item, "dspace", "orcid", "authenticated", authenticated); + } + + public ItemBuilder withOrcidSynchronizationPublicationsPreference(OrcidEntitySyncPreference value) { + return withOrcidSynchronizationPublicationsPreference(value.name()); + } + + public ItemBuilder withOrcidSynchronizationPublicationsPreference(String value) { + return setMetadataSingleValue(item, "dspace", "orcid", "sync-publications", value); + } + + public ItemBuilder withOrcidSynchronizationFundingsPreference(OrcidEntitySyncPreference value) { + return withOrcidSynchronizationFundingsPreference(value.name()); + } + + public ItemBuilder withOrcidSynchronizationFundingsPreference(String value) { + return setMetadataSingleValue(item, "dspace", "orcid", "sync-fundings", value); + } + + public ItemBuilder withOrcidSynchronizationProfilePreference(OrcidProfileSyncPreference value) { + return withOrcidSynchronizationProfilePreference(value.name()); + } + + public ItemBuilder withOrcidSynchronizationProfilePreference(String value) { + return addMetadataValue(item, "dspace", "orcid", "sync-profile", value); + } + + public ItemBuilder withOrcidSynchronizationMode(OrcidSynchronizationMode mode) { + return withOrcidSynchronizationMode(mode.name()); + } + + private ItemBuilder withOrcidSynchronizationMode(String mode) { + return setMetadataSingleValue(item, "dspace", "orcid", "sync-mode", mode); + } + + public ItemBuilder withPersonCountry(String country) { + return addMetadataValue(item, "person", "country", null, country); + } + + public ItemBuilder withScopusAuthorIdentifier(String id) { + return addMetadataValue(item, "person", "identifier", "scopus-author-id", id); + } + + public ItemBuilder withResearcherIdentifier(String rid) { + return addMetadataValue(item, "person", "identifier", "rid", rid); + } + + public ItemBuilder withVernacularName(String vernacularName) { + return setMetadataSingleValue(item, "person", "name", 
"translated", vernacularName); + } + + public ItemBuilder withVariantName(String variant) { + return addMetadataValue(item, "person", "name", "variant", variant); + } + public ItemBuilder makeUnDiscoverable() { item.setDiscoverable(false); return this; } + public ItemBuilder withHandle(String handle) { + this.handle = handle; + return this; + } + /** - * Withdrawn the item under build. Please note that an user need to be loggedin the context to avoid NPE during the - * creation of the provenance metadata + * Withdraw the item under build. Please note that the Context must be + * logged in to avoid NPE during the creation of the provenance metadata. * * @return the ItemBuilder */ @@ -136,7 +292,13 @@ public ItemBuilder withdrawn() { return this; } - public ItemBuilder withEmbargoPeriod(String embargoPeriod) { + /** + * Set an embargo to end after some time from "now". + * + * @param embargoPeriod embargo starting "now", for this long. + * @return the ItemBuilder. + */ + public ItemBuilder withEmbargoPeriod(Period embargoPeriod) { return setEmbargo(embargoPeriod, item); } @@ -145,10 +307,62 @@ public ItemBuilder withReaderGroup(Group group) { return this; } + public ItemBuilder withOrgUnitLegalName(String name) { + return addMetadataValue(item, "organization", "legalName", null, name); + } + + public ItemBuilder withOrgUnitCountry(String addressCountry) { + return addMetadataValue(item, "organization", "address", "addressCountry", addressCountry); + } + + public ItemBuilder withOrgUnitLocality(String addressLocality) { + return addMetadataValue(item, "organization", "address", "addressLocality", addressLocality); + } + + public ItemBuilder withOrgUnitCrossrefIdentifier(String crossrefid) { + return addMetadataValue(item, "organization", "identifier", "crossrefid", crossrefid); + } + + public ItemBuilder withProjectStartDate(String startDate) { + return addMetadataValue(item, "project", "startDate", null, startDate); + } + + public ItemBuilder 
withProjectEndDate(String endDate) { + return addMetadataValue(item, "project", "endDate", null, endDate); + } + + public ItemBuilder withProjectInvestigator(String investigator) { + return addMetadataValue(item, "project", "investigator", null, investigator); + } + + public ItemBuilder withDescription(String description) { + return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "description", null, description); + } + + public ItemBuilder withProjectAmount(String amount) { + return addMetadataValue(item, "project", "amount", null, amount); + } + + public ItemBuilder withProjectAmountCurrency(String currency) { + return addMetadataValue(item, "project", "amount", "currency", currency); + } + + public ItemBuilder withUriIdentifier(String uri) { + return addMetadataValue(item, "dc", "identifier", "uri", uri); + } + + public ItemBuilder withIdentifier(String identifier) { + return addMetadataValue(item, "dc", "identifier", null, identifier); + } + + public ItemBuilder withOtherIdentifier(String identifier) { + return addMetadataValue(item, "dc", "identifier", "other", identifier); + } + /** - * Create an admin group for the collection with the specified members + * Assign the admin permission to the specified eperson * - * @param members epersons to add to the admin group + * @param ePerson the eperson that will get the ADMIN permission on the item * @return this builder * @throws SQLException * @throws AuthorizeException @@ -157,11 +371,14 @@ public ItemBuilder withAdminUser(EPerson ePerson) throws SQLException, Authorize return setAdminPermission(item, ePerson, null); } + public ItemBuilder withPersonEmail(String email) { + return addMetadataValue(item, "person", "email", null, email); + } @Override public Item build() { try { - installItemService.installItem(context, workspaceItem); + installItemService.installItem(context, workspaceItem, this.handle); itemService.update(context, item); //Check if we need to make this item private. 
This has to be done after item install. @@ -185,13 +402,19 @@ public Item build() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); + // If the workspaceItem used to create this item still exists, delete it + workspaceItem = c.reloadEntity(workspaceItem); + if (workspaceItem != null) { + workspaceItemService.deleteAll(c, workspaceItem); + } // Ensure object and any related objects are reloaded before checking to see what needs cleanup item = c.reloadEntity(item); if (item != null) { delete(c, item); - c.complete(); } + c.complete(); } } @@ -220,4 +443,18 @@ public static void deleteItem(UUID uuid) throws SQLException, IOException { c.complete(); } } + + public ItemBuilder grantLicense() { + String license; + try { + EPerson submitter = workspaceItem.getSubmitter(); + submitter = context.reloadEntity(submitter); + license = getLicenseText(context.getCurrentLocale(), workspaceItem.getCollection(), item, submitter); + LicenseUtils.grantLicense(context, item, license, null); + } catch (Exception e) { + handleException(e); + } + return this; + } + } diff --git a/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java b/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java index dfc9112a3f07..52acf9d5ed16 100644 --- a/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/MetadataFieldBuilder.java @@ -38,6 +38,7 @@ protected MetadataFieldService getService() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup metadataField = c.reloadEntity(metadataField); diff --git a/dspace-api/src/test/java/org/dspace/builder/MetadataSchemaBuilder.java 
b/dspace-api/src/test/java/org/dspace/builder/MetadataSchemaBuilder.java index 10e1894ee4ed..50b07b64550a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/MetadataSchemaBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/MetadataSchemaBuilder.java @@ -37,6 +37,7 @@ protected MetadataSchemaService getService() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup metadataSchema = c.reloadEntity(metadataSchema); diff --git a/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java b/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java new file mode 100644 index 000000000000..199f412f8506 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/OrcidHistoryBuilder.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Date; + +import org.apache.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.service.OrcidHistoryService; +/** + * Builder to construct OrcidHistory objects + * + * @author Mykhaylo Boychuk (4science) + */ +public class OrcidHistoryBuilder extends AbstractBuilder { + + private static final Logger log = Logger.getLogger(OrcidHistoryBuilder.class); + + private OrcidHistory orcidHistory; + + protected OrcidHistoryBuilder(Context context) { + super(context); + } + + @Override + protected OrcidHistoryService getService() { + return orcidHistoryService; + } + + @Override + public void 
cleanup() throws Exception { + delete(orcidHistory); + } + + public static OrcidHistoryBuilder createOrcidHistory(Context context, Item profileItem, Item entity) { + OrcidHistoryBuilder builder = new OrcidHistoryBuilder(context); + return builder.create(context, profileItem, entity); + } + + private OrcidHistoryBuilder create(Context context, Item profileItem, Item entity) { + try { + this.context = context; + this.orcidHistory = getService().create(context, profileItem, entity); + } catch (Exception e) { + log.error("Error in OrcidHistoryBuilder.create(..), error: ", e); + } + return this; + } + + @Override + public OrcidHistory build() throws SQLException { + try { + getService().update(context, orcidHistory); + context.dispatchEvents(); + + indexingService.commit(); + } catch (Exception e) { + log.error("Error in OrcidHistoryBuilder.build(), error: ", e); + } + return orcidHistory; + } + + @Override + public void delete(Context c, OrcidHistory orcidHistory) throws Exception { + if (orcidHistory != null) { + getService().delete(c, orcidHistory); + } + } + + /** + * Delete the Test OrcidHistory referred to by the given ID + * + * @param id Integer of Test OrcidHistory to delete + * @throws SQLException + * @throws IOException + */ + public static void deleteOrcidHistory(Integer id) throws SQLException, IOException { + if (id == null) { + return; + } + + try (Context c = new Context()) { + OrcidHistory orcidHistory = orcidHistoryService.find(c, id); + if (orcidHistory != null) { + orcidHistoryService.delete(c, orcidHistory); + } + c.complete(); + } + } + + public void delete(OrcidHistory orcidHistory) throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + OrcidHistory attachedTab = c.reloadEntity(orcidHistory); + if (attachedTab != null) { + getService().delete(c, attachedTab); + } + c.complete(); + } + indexingService.commit(); + } + + public OrcidHistoryBuilder withResponseMessage(String responseMessage) throws SQLException { + 
orcidHistory.setResponseMessage(responseMessage); + return this; + } + + public OrcidHistoryBuilder withPutCode(String putCode) throws SQLException { + orcidHistory.setPutCode(putCode); + return this; + } + + public OrcidHistoryBuilder withStatus(Integer status) throws SQLException { + orcidHistory.setStatus(status); + return this; + } + + public OrcidHistoryBuilder withMetadata(String metadata) throws SQLException { + orcidHistory.setMetadata(metadata); + return this; + } + + public OrcidHistoryBuilder withRecordType(String recordType) throws SQLException { + orcidHistory.setRecordType(recordType); + return this; + } + + public OrcidHistoryBuilder withOperation(OrcidOperation operation) throws SQLException { + orcidHistory.setOperation(operation); + return this; + } + + public OrcidHistoryBuilder withDescription(String description) throws SQLException { + orcidHistory.setDescription(description); + return this; + } + + public OrcidHistoryBuilder withTimestamp(Date timestamp) { + orcidHistory.setTimestamp(timestamp); + return this; + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/OrcidQueueBuilder.java b/dspace-api/src/test/java/org/dspace/builder/OrcidQueueBuilder.java new file mode 100644 index 000000000000..bbc0e0e53208 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/OrcidQueueBuilder.java @@ -0,0 +1,146 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.service.OrcidQueueService; + +/** + * Builder to construct OrcidQueue objects + * + * @author Mykhaylo Boychuk (4science) + */ 
+public class OrcidQueueBuilder extends AbstractBuilder { + + private OrcidQueue orcidQueue; + + protected OrcidQueueBuilder(Context context) { + super(context); + } + + @Override + protected OrcidQueueService getService() { + return orcidQueueService; + } + + @Override + public void cleanup() throws Exception { + delete(orcidQueue); + } + + public static OrcidQueueBuilder createOrcidQueue(Context context, Item profileItem, Item entity) { + OrcidQueueBuilder builder = new OrcidQueueBuilder(context); + return builder.createEntityInsertionRecord(context, profileItem, entity); + } + + public static OrcidQueueBuilder createOrcidQueue(Context context, Item profileItem, Item entity, String putCode) { + OrcidQueueBuilder builder = new OrcidQueueBuilder(context); + return builder.createEntityUpdateRecord(context, profileItem, entity, putCode); + } + + public static OrcidQueueBuilder createOrcidQueue(Context context, Item profileItem, String description, + String type, String putCode) { + OrcidQueueBuilder builder = new OrcidQueueBuilder(context); + return builder.createEntityDeletionRecord(context, profileItem, description, type, putCode); + } + + private OrcidQueueBuilder createEntityDeletionRecord(Context context, Item profileItem, + String description, String type, String putCode) { + try { + this.context = context; + this.orcidQueue = getService().createEntityDeletionRecord(context, profileItem, description, type, putCode); + } catch (Exception e) { + throw new RuntimeException(e); + } + return this; + } + + private OrcidQueueBuilder createEntityUpdateRecord(Context context, Item profileItem, Item entity, String putCode) { + try { + this.context = context; + this.orcidQueue = getService().createEntityUpdateRecord(context, profileItem, entity, putCode); + } catch (Exception e) { + throw new RuntimeException(e); + } + return this; + } + + private OrcidQueueBuilder createEntityInsertionRecord(Context context, Item profileItem, Item entity) { + try { + this.context = 
context; + this.orcidQueue = getService().createEntityInsertionRecord(context, profileItem, entity); + } catch (Exception e) { + throw new RuntimeException(e); + } + return this; + } + + @Override + public OrcidQueue build() throws SQLException, AuthorizeException { + try { + getService().update(context, orcidQueue); + context.dispatchEvents(); + + indexingService.commit(); + } catch (Exception e) { + throw new RuntimeException(e); + } + return orcidQueue; + } + + public OrcidQueueBuilder withPutCode(String putCode) { + orcidQueue.setPutCode(putCode); + return this; + } + + public OrcidQueueBuilder withMetadata(String metadata) throws SQLException { + orcidQueue.setMetadata(metadata); + return this; + } + + public OrcidQueueBuilder withRecordType(String recordType) throws SQLException { + orcidQueue.setRecordType(recordType); + return this; + } + + public OrcidQueueBuilder withOperation(OrcidOperation operation) throws SQLException { + orcidQueue.setOperation(operation); + return this; + } + + public OrcidQueueBuilder withDescription(String description) throws SQLException { + orcidQueue.setDescription(description); + return this; + } + + @Override + public void delete(Context c, OrcidQueue orcidQueue) throws Exception { + if (orcidQueue != null) { + getService().delete(c, orcidQueue); + } + } + + public void delete(OrcidQueue orcidQueue) throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + OrcidQueue attachedTab = c.reloadEntity(orcidQueue); + if (attachedTab != null) { + getService().delete(c, attachedTab); + } + c.complete(); + } + indexingService.commit(); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/builder/OrcidTokenBuilder.java b/dspace-api/src/test/java/org/dspace/builder/OrcidTokenBuilder.java new file mode 100644 index 000000000000..e3e149a9ec09 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/OrcidTokenBuilder.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the 
license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; + +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidToken; +import org.dspace.orcid.service.OrcidTokenService; + +/** + * Builder for {@link OrcidToken} entities. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidTokenBuilder extends AbstractBuilder { + + private OrcidToken orcidToken; + + protected OrcidTokenBuilder(Context context) { + super(context); + } + + public static OrcidTokenBuilder create(Context context, EPerson ePerson, String accessToken) { + OrcidTokenBuilder builder = new OrcidTokenBuilder(context); + builder.create(ePerson, accessToken); + return builder; + } + + private void create(EPerson ePerson, String accessToken) { + orcidToken = orcidTokenService.create(context, ePerson, accessToken); + } + + public OrcidTokenBuilder withProfileItem(Item profileItem) { + orcidToken.setProfileItem(profileItem); + return this; + } + + @Override + public OrcidToken build() throws SQLException, AuthorizeException { + return orcidToken; + } + + @Override + public void delete(Context c, OrcidToken orcidToken) throws Exception { + orcidTokenService.delete(c, orcidToken); + } + + @Override + public void cleanup() throws Exception { + try (Context context = new Context()) { + context.setDispatcher("noindex"); + context.turnOffAuthorisationSystem(); + orcidToken = context.reloadEntity(orcidToken); + if (orcidToken != null) { + delete(context, orcidToken); + context.complete(); + } + } + } + + @Override + protected OrcidTokenService getService() { + return orcidTokenService; + } + +} diff --git a/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java 
b/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java index c0de46e1e5c1..633d025f2e48 100644 --- a/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/PoolTaskBuilder.java @@ -104,6 +104,7 @@ private void deleteWsi(Context c, WorkspaceItem dso) throws Exception { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup workspaceItem = c.reloadEntity(workspaceItem); diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 6970cd57c3d0..fe8f7b8167af 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -11,12 +11,15 @@ import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; +import java.util.Date; import java.util.List; +import java.util.Set; import org.dspace.authorize.AuthorizeException; import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; import org.dspace.scripts.service.ProcessService; @@ -33,14 +36,22 @@ public static ProcessBuilder createProcess(Context context, EPerson ePerson, Str List parameters) throws SQLException { ProcessBuilder processBuilder = new ProcessBuilder(context); - return processBuilder.create(context, ePerson, scriptName, parameters); + return processBuilder.create(context, ePerson, scriptName, parameters, null); + } + + public static ProcessBuilder createProcess(Context context, EPerson ePerson, String scriptName, + List parameters, + Set specialGroups) + throws SQLException { + 
ProcessBuilder processBuilder = new ProcessBuilder(context); + return processBuilder.create(context, ePerson, scriptName, parameters, specialGroups); } private ProcessBuilder create(Context context, EPerson ePerson, String scriptName, - List parameters) + List parameters, final Set specialGroups) throws SQLException { this.context = context; - this.process = processService.create(context, ePerson, scriptName, parameters); + this.process = processService.create(context, ePerson, scriptName, parameters, specialGroups); this.process.setProcessStatus(ProcessStatus.SCHEDULED); return this; } @@ -50,16 +61,22 @@ public ProcessBuilder withProcessStatus(ProcessStatus processStatus) { return this; } + public ProcessBuilder withCreationTime(Date creationTime) { + process.setCreationTime(creationTime); + return this; + } + public ProcessBuilder withStartAndEndTime(String startTime, String endTime) throws ParseException { SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd/MM/yyyy"); - process.setStartTime(simpleDateFormat.parse(startTime)); - process.setFinishedTime(simpleDateFormat.parse(endTime)); + process.setStartTime(startTime == null ? null : simpleDateFormat.parse(startTime)); + process.setFinishedTime(endTime == null ? 
null : simpleDateFormat.parse(endTime)); return this; } @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup process = c.reloadEntity(process); @@ -96,6 +113,9 @@ public void delete(Context c, Process dso) throws Exception { } public static void deleteProcess(Integer integer) throws SQLException, IOException { + if (integer == null) { + return; + } try (Context c = new Context()) { c.turnOffAuthorisationSystem(); Process process = processService.find(c, integer); diff --git a/dspace-api/src/test/java/org/dspace/builder/QAEventBuilder.java b/dspace-api/src/test/java/org/dspace/builder/QAEventBuilder.java new file mode 100644 index 000000000000..823080516df8 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/QAEventBuilder.java @@ -0,0 +1,141 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.util.Date; + +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.QAEvent; +import org.dspace.core.Context; +import org.dspace.qaevent.service.QAEventService; + +/** + * Builder to construct Quality Assurance Broker Event objects + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class QAEventBuilder extends AbstractBuilder { + + private Item item; + private QAEvent target; + private String source = QAEvent.OPENAIRE_SOURCE; + /** + * the title of the DSpace object + * */ + private String title; + /** + * the name of the Quality Assurance Event Topic + * */ + private String topic; + /** + * thr original QA Event imported + * */ + private String message; + /** + * uuid of the targeted DSpace 
object + * */ + private String relatedItem; + private double trust = 0.5; + private Date lastUpdate = new Date(); + + protected QAEventBuilder(Context context) { + super(context); + } + + public static QAEventBuilder createTarget(final Context context, final Collection col, final String name) { + QAEventBuilder builder = new QAEventBuilder(context); + return builder.create(context, col, name); + } + + public static QAEventBuilder createTarget(final Context context, final Item item) { + QAEventBuilder builder = new QAEventBuilder(context); + return builder.create(context, item); + } + + private QAEventBuilder create(final Context context, final Collection col, final String name) { + this.context = context; + + try { + ItemBuilder itemBuilder = ItemBuilder.createItem(context, col).withTitle(name); + item = itemBuilder.build(); + this.title = name; + context.dispatchEvents(); + indexingService.commit(); + } catch (Exception e) { + return handleException(e); + } + return this; + } + + private QAEventBuilder create(final Context context, final Item item) { + this.context = context; + this.item = item; + return this; + } + + public QAEventBuilder withTopic(final String topic) { + this.topic = topic; + return this; + } + public QAEventBuilder withSource(final String source) { + this.source = source; + return this; + } + public QAEventBuilder withTitle(final String title) { + this.title = title; + return this; + } + public QAEventBuilder withMessage(final String message) { + this.message = message; + return this; + } + public QAEventBuilder withTrust(final double trust) { + this.trust = trust; + return this; + } + public QAEventBuilder withLastUpdate(final Date lastUpdate) { + this.lastUpdate = lastUpdate; + return this; + } + + public QAEventBuilder withRelatedItem(String relatedItem) { + this.relatedItem = relatedItem; + return this; + } + + @Override + public QAEvent build() { + target = new QAEvent(source, "oai:www.dspace.org:" + item.getHandle(), 
item.getID().toString(), title, topic, + trust, message, lastUpdate); + target.setRelated(relatedItem); + try { + qaEventService.store(context, target); + } catch (Exception e) { + e.printStackTrace(); + } + return target; + } + + @Override + public void cleanup() throws Exception { + qaEventService.deleteEventByEventId(target.getEventId()); + } + + @Override + protected QAEventService getService() { + return qaEventService; + } + + @Override + public void delete(Context c, QAEvent dso) throws Exception { + qaEventService.deleteEventByEventId(target.getEventId()); + +// qaEventService.deleteTarget(dso); + } +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java b/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java index 874603341980..c8c5cf85bf1a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/RelationshipBuilder.java @@ -39,6 +39,7 @@ protected RelationshipService getService() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup relationship = c.reloadEntity(relationship); @@ -106,18 +107,26 @@ public static void deleteRelationship(Integer id) throws SQLException, IOExcepti } public static RelationshipBuilder createRelationshipBuilder(Context context, Item leftItem, Item rightItem, - RelationshipType relationshipType) { + RelationshipType relationshipType, int leftPlace, int rightPlace) { RelationshipBuilder relationshipBuilder = new RelationshipBuilder(context); - return relationshipBuilder.create(context, leftItem, rightItem, relationshipType); + return relationshipBuilder.create(context, leftItem, rightItem, relationshipType, leftPlace, rightPlace); + } + + public static RelationshipBuilder 
createRelationshipBuilder(Context context, Item leftItem, Item rightItem, + RelationshipType relationshipType) { + + return createRelationshipBuilder(context, leftItem, rightItem, relationshipType, -1, -1); } private RelationshipBuilder create(Context context, Item leftItem, Item rightItem, - RelationshipType relationshipType) { + RelationshipType relationshipType, int leftPlace, int rightPlace) { this.context = context; try { - relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0); + //place -1 will add it to the end + relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, + leftPlace, rightPlace); } catch (SQLException | AuthorizeException e) { log.warn("Failed to create relationship", e); } @@ -139,4 +148,10 @@ public RelationshipBuilder withLeftPlace(int leftPlace) { relationship.setLeftPlace(leftPlace); return this; } + + public RelationshipBuilder withLatestVersionStatus(Relationship.LatestVersionStatus latestVersionStatus) { + relationship.setLatestVersionStatus(latestVersionStatus); + return this; + } + } diff --git a/dspace-api/src/test/java/org/dspace/builder/RelationshipTypeBuilder.java b/dspace-api/src/test/java/org/dspace/builder/RelationshipTypeBuilder.java index fa1e3b4766d8..60ea385b1a88 100644 --- a/dspace-api/src/test/java/org/dspace/builder/RelationshipTypeBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/RelationshipTypeBuilder.java @@ -8,13 +8,11 @@ package org.dspace.builder; import java.sql.SQLException; -import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.content.EntityType; -import org.dspace.content.Relationship; import org.dspace.content.RelationshipType; import org.dspace.content.service.RelationshipTypeService; import org.dspace.core.Context; @@ -39,14 +37,10 @@ protected RelationshipTypeService getService() { @Override public 
void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup relationshipType = c.reloadEntity(relationshipType); - List byRelationshipType = relationshipService - .findByRelationshipType(c, relationshipType); - for (Relationship relationship : byRelationshipType) { - relationshipService.delete(c, relationship); - } if (relationshipType != null) { delete(c, relationshipType); } diff --git a/dspace-api/src/test/java/org/dspace/builder/RequestItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/RequestItemBuilder.java new file mode 100644 index 000000000000..66e6245ff625 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/RequestItemBuilder.java @@ -0,0 +1,165 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.builder; + +import java.sql.SQLException; +import java.util.Date; +import javax.validation.constraints.NotNull; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.app.requestitem.RequestItem; +import org.dspace.app.requestitem.service.RequestItemService; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Manage the creation and cleanup of {@link RequestItem}s for testing. + * + * @author Mark H. 
Wood + */ +public class RequestItemBuilder + extends AbstractBuilder { + private static final Logger LOG = LogManager.getLogger(); + + public static final String REQ_EMAIL = "jsmith@example.com"; + public static final String REQ_NAME = "John Smith"; + public static final String REQ_MESSAGE = "Please send me a copy of this."; + public static final String REQ_PATH = "test/file"; + + private RequestItem requestItem; + private Item item; + private Bitstream bitstream; + private Date decisionDate; + private boolean accepted; + + protected RequestItemBuilder(Context context) { + super(context); + } + + /** + * Initialize a RequestItem. + * + * @param ctx current DSpace session. + * @param item the requested Item. + * @param bitstream the single requested Bitstream, or null for "all files". + * @return a builder initialized for this request. + */ + public static RequestItemBuilder createRequestItem(Context ctx, + @NotNull Item item, Bitstream bitstream) { + RequestItemBuilder builder = new RequestItemBuilder(ctx); + return builder.create(item, bitstream); + } + + private RequestItemBuilder create(Item item, Bitstream bitstream) { + this.item = item; + this.bitstream = bitstream; + return this; + } + + /** + * Set the date on which a decision was made concerning this request. + * + * @param date the date of the decision. + * @return this builder. + */ + public RequestItemBuilder withDecisionDate(Date date) { + this.decisionDate = date; + return this; + } + + /** + * Set whether request has been accepted. Does not set the decision + * date. + * + * @param accepted true if request is accepted. + * @return this builder. 
+ */ + public RequestItemBuilder withAcceptRequest(boolean accepted) { + this.accepted = accepted; + return this; + } + + @Override + public RequestItem build() { + LOG.atDebug() + .withLocation() + .log("Building request with item ID {} and bitstream ID {}", + () -> item.getID().toString(), + () -> bitstream.getID().toString()); + + String token; + try { + token = requestItemService.createRequest(context, bitstream, item, + (null == bitstream), + REQ_EMAIL, REQ_NAME, REQ_MESSAGE); + } catch (SQLException ex) { + return handleException(ex); + } + requestItem = requestItemService.findByToken(context, token); + requestItem.setAccept_request(accepted); + requestItem.setDecision_date(decisionDate); + + requestItemService.update(context, requestItem); + + return requestItem; + } + + @Override + public void cleanup() + throws Exception { + LOG.debug("cleanup()"); + try ( Context ctx = new Context(); ) { + ctx.setDispatcher("noindex"); + ctx.turnOffAuthorisationSystem(); + requestItem = ctx.reloadEntity(requestItem); + if (null != requestItem) { + delete(ctx, requestItem); + ctx.complete(); + requestItem = null; + } else { + LOG.debug("nothing to clean up."); + } + } + } + + @Override + public void delete(Context context, RequestItem request) + throws Exception { + requestItemService.delete(context, request); + } + + /** + * Delete a request identified by its token. If no such token is known, + * simply return. + * + * @param token the token identifying the request. 
+ * @throws java.sql.SQLException passed through + */ + static public void deleteRequestItem(String token) + throws SQLException { + LOG.atDebug() + .withLocation() + .log("Delete RequestItem with token {}", token); + try (Context context = new Context()) { + RequestItem request = requestItemService.findByToken(context, token); + if (null == request) { + return; + } + requestItemService.delete(context, request); + context.complete(); + } + } + + @Override + protected RequestItemService getService() { + return requestItemService; + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/ResourcePolicyBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ResourcePolicyBuilder.java index cdb32270df44..70b1f8d73daf 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ResourcePolicyBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ResourcePolicyBuilder.java @@ -41,6 +41,7 @@ protected ResourcePolicyService getService() { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup resourcePolicy = c.reloadEntity(resourcePolicy); @@ -81,6 +82,7 @@ public ResourcePolicy build() { public void delete(ResourcePolicy rp) throws Exception { try (Context c = new Context()) { c.turnOffAuthorisationSystem(); + c.setDispatcher("noindex"); ResourcePolicy attachedDso = c.reloadEntity(rp); if (attachedDso != null) { getService().delete(c, attachedDso); diff --git a/dspace-api/src/test/java/org/dspace/builder/SubscribeBuilder.java b/dspace-api/src/test/java/org/dspace/builder/SubscribeBuilder.java new file mode 100644 index 000000000000..40e890a8c962 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/SubscribeBuilder.java @@ -0,0 +1,111 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the 
root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Subscription; +import org.dspace.eperson.SubscriptionParameter; +import org.dspace.eperson.service.SubscribeService; + +public class SubscribeBuilder extends AbstractBuilder { + + /* Log4j logger*/ + private static final Logger log = LogManager.getLogger(); + + private Subscription subscription; + + protected SubscribeBuilder(Context context) { + super(context); + } + + @Override + protected SubscribeService getService() { + return subscribeService; + } + + @Override + public void cleanup() throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + subscription = c.reloadEntity(subscription); + if (subscription != null) { + delete(c, subscription); + } + c.complete(); + indexingService.commit(); + } + } + + public static void deleteSubscription(int id) throws Exception { + try (Context c = new Context()) { + c.turnOffAuthorisationSystem(); + Subscription subscription = subscribeService.findById(c, id); + if (Objects.nonNull(subscription)) { + try { + subscribeService.deleteSubscription(c, subscription); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + c.complete(); + } + indexingService.commit(); + } + + @Override + public Subscription build() { + try { + + context.dispatchEvents(); + + indexingService.commit(); + } catch (SearchServiceException e) { + log.error(e); + } + 
return subscription; + } + + public static SubscribeBuilder subscribeBuilder(final Context context, String type, DSpaceObject dSpaceObject, + EPerson ePerson, List subscriptionParameterList) { + SubscribeBuilder builder = new SubscribeBuilder(context); + return builder.create(context, type, dSpaceObject, ePerson, subscriptionParameterList); + } + + private SubscribeBuilder create(Context context, String type, DSpaceObject dSpaceObject, EPerson ePerson, + List subscriptionParameterList) { + try { + + this.context = context; + this.subscription = subscribeService.subscribe(context, ePerson, dSpaceObject, + subscriptionParameterList, type); + + } catch (SQLException | AuthorizeException e) { + log.warn("Failed to create the Subscription", e); + } + return this; + } + + @Override + public void delete(Context c, Subscription dso) throws Exception { + if (Objects.nonNull(dso)) { + getService().deleteSubscription(c, dso); + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/builder/SupervisionOrderBuilder.java b/dspace-api/src/test/java/org/dspace/builder/SupervisionOrderBuilder.java new file mode 100644 index 000000000000..849e4cd4ffb5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/SupervisionOrderBuilder.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; +import java.util.Objects; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.eperson.Group; +import org.dspace.supervision.SupervisionOrder; +import org.dspace.supervision.service.SupervisionOrderService; + +/** + * Abstract builder to construct 
SupervisionOrder Objects + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class SupervisionOrderBuilder + extends AbstractBuilder { + + private static final Logger log = LogManager.getLogger(SupervisionOrderBuilder.class); + + private SupervisionOrder supervisionOrder; + + protected SupervisionOrderBuilder(Context context) { + super(context); + } + + public static SupervisionOrderBuilder createSupervisionOrder(Context context, Item item, Group group) { + SupervisionOrderBuilder builder = new SupervisionOrderBuilder(context); + return builder.create(context, item, group); + } + + private SupervisionOrderBuilder create(Context context, Item item, Group group) { + try { + this.context = context; + this.supervisionOrder = getService().create(context, item, group); + } catch (Exception e) { + log.error("Error in SupervisionOrderBuilder.create(..), error: ", e); + } + return this; + } + + @Override + public void cleanup() throws Exception { + delete(supervisionOrder); + } + + @Override + public SupervisionOrder build() throws SQLException, AuthorizeException { + try { + getService().update(context, supervisionOrder); + context.dispatchEvents(); + indexingService.commit(); + } catch (Exception e) { + log.error("Error in SupervisionOrderBuilder.build(), error: ", e); + } + return supervisionOrder; + } + + @Override + public void delete(Context context, SupervisionOrder supervisionOrder) throws Exception { + if (Objects.nonNull(supervisionOrder)) { + getService().delete(context, supervisionOrder); + } + } + + @Override + protected SupervisionOrderService getService() { + return supervisionOrderService; + } + + private void delete(SupervisionOrder supervisionOrder) throws Exception { + try (Context context = new Context()) { + context.turnOffAuthorisationSystem(); + context.setDispatcher("noindex"); + SupervisionOrder attached = context.reloadEntity(supervisionOrder); + if (attached != null) { + getService().delete(context, attached); + } + 
context.complete(); + indexingService.commit(); + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/SystemWideAlertBuilder.java b/dspace-api/src/test/java/org/dspace/builder/SystemWideAlertBuilder.java new file mode 100644 index 000000000000..cb6489815235 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/SystemWideAlertBuilder.java @@ -0,0 +1,94 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; + +import java.sql.SQLException; +import java.util.Date; + +import org.dspace.alerts.AllowSessionsEnum; +import org.dspace.alerts.SystemWideAlert; +import org.dspace.alerts.service.SystemWideAlertService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; + +public class SystemWideAlertBuilder extends AbstractBuilder { + + private SystemWideAlert systemWideAlert; + + protected SystemWideAlertBuilder(Context context) { + super(context); + } + + public static SystemWideAlertBuilder createSystemWideAlert(Context context, String message) + throws SQLException, AuthorizeException { + SystemWideAlertBuilder systemWideAlertBuilder = new SystemWideAlertBuilder(context); + return systemWideAlertBuilder.create(context, message, AllowSessionsEnum.ALLOW_ALL_SESSIONS, null, false); + } + + private SystemWideAlertBuilder create(Context context, String message, AllowSessionsEnum allowSessionsType, + Date countdownTo, boolean active) + throws SQLException, AuthorizeException { + this.context = context; + this.systemWideAlert = systemWideAlertService.create(context, message, allowSessionsType, countdownTo, active); + return this; + } + + public SystemWideAlertBuilder withAllowSessions(AllowSessionsEnum allowSessionsType) { + systemWideAlert.setAllowSessions(allowSessionsType); + return this; + } + + public SystemWideAlertBuilder 
withCountdownDate(Date countdownTo) { + systemWideAlert.setCountdownTo(countdownTo); + return this; + } + + public SystemWideAlertBuilder isActive(boolean isActive) { + systemWideAlert.setActive(isActive); + return this; + } + + @Override + public void cleanup() throws Exception { + try (Context c = new Context()) { + c.setDispatcher("noindex"); + c.turnOffAuthorisationSystem(); + // Ensure object and any related objects are reloaded before checking to see what needs cleanup + systemWideAlert = c.reloadEntity(systemWideAlert); + if (systemWideAlert != null) { + delete(c, systemWideAlert); + } + c.complete(); + indexingService.commit(); + } + } + + @Override + public SystemWideAlert build() { + try { + systemWideAlertService.update(context, systemWideAlert); + context.dispatchEvents(); + indexingService.commit(); + } catch (Exception e) { + return null; + } + return systemWideAlert; + } + + + @Override + protected SystemWideAlertService getService() { + return systemWideAlertService; + } + + public void delete(Context c, SystemWideAlert alert) throws Exception { + if (alert != null) { + getService().delete(c, alert); + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/builder/VersionBuilder.java b/dspace-api/src/test/java/org/dspace/builder/VersionBuilder.java new file mode 100644 index 000000000000..7a2b718df6e2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/builder/VersionBuilder.java @@ -0,0 +1,112 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.builder; +import java.io.IOException; +import java.sql.SQLException; +import java.util.Objects; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; 
+import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; +import org.dspace.versioning.Version; +import org.dspace.versioning.service.VersioningService; + +/** + * Builder to construct Version objects + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class VersionBuilder extends AbstractBuilder { + + private static final Logger log = LogManager.getLogger(VersionBuilder.class); + + private Version version; + + protected VersionBuilder(Context context) { + super(context); + } + + public static VersionBuilder createVersion(Context context, Item item, String summary) { + VersionBuilder builder = new VersionBuilder(context); + return builder.create(context, item, summary); + } + + private VersionBuilder create(Context context, Item item, String summary) { + try { + this.context = context; + if (StringUtils.isNotBlank(summary)) { + this.version = getService().createNewVersion(context, item, summary); + } else { + this.version = getService().createNewVersion(context, item); + } + } catch (Exception e) { + log.error("Error in VersionBuilder.create(..), error: ", e); + } + return this; + } + + @Override + public Version build() throws SQLException, AuthorizeException { + try { + getService().update(context, version); + context.dispatchEvents(); + indexingService.commit(); + } catch (Exception e) { + log.error("Error in VersionBuilder.build(), error: ", e); + } + return version; + } + + @Override + public void delete(Context context, Version version) throws Exception { + if (Objects.nonNull(version)) { + getService().delete(context, version); + } + } + + @Override + protected VersioningService getService() { + return versioningService; + } + + @Override + public void cleanup() throws Exception { + delete(version); + } + + public void delete(Version version) throws Exception { + try (Context context = new Context()) { + context.turnOffAuthorisationSystem(); + context.setDispatcher("noindex"); + Version attachedTab = 
context.reloadEntity(version); + if (attachedTab != null) { + getService().delete(context, attachedTab); + } + context.complete(); + } + indexingService.commit(); + } + + public static void delete(Integer id) + throws SQLException, IOException, SearchServiceException { + try (Context context = new Context()) { + context.turnOffAuthorisationSystem(); + Version version = versioningService.getVersion(context, id); + if (version != null) { + versioningService.delete(context, version); + } + context.complete(); + } + indexingService.commit(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java index b4a3b930fbdb..e06819d0ca4d 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkflowItemBuilder.java @@ -116,6 +116,7 @@ private void deleteItem(Context c, Item dso) throws Exception { @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup workspaceItem = c.reloadEntity(workspaceItem); diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java index 416c3e84610c..9d786d4761f0 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java @@ -106,6 +106,7 @@ public static void deleteWorkspaceItem(Integer id) throws SQLException, IOExcept @Override public void cleanup() throws Exception { try (Context c = new Context()) { + c.setDispatcher("noindex"); c.turnOffAuthorisationSystem(); // Ensure object and any related objects are reloaded before checking to see what needs cleanup 
workspaceItem = c.reloadEntity(workspaceItem); @@ -176,14 +177,22 @@ public WorkspaceItemBuilder withSubject(final String subject) { return addMetadataValue(MetadataSchemaEnum.DC.getName(), "subject", null, subject); } - public WorkspaceItemBuilder withAbstract(final String subject) { - return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject); + public WorkspaceItemBuilder withIssn(String issn) { + return addMetadataValue(MetadataSchemaEnum.DC.getName(), "identifier", "issn", issn); } public WorkspaceItemBuilder withEntityType(final String entityType) { return addMetadataValue("dspace", "entity", "type", entityType); } + public WorkspaceItemBuilder withAbstract(final String subject) { + return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject); + } + + public WorkspaceItemBuilder withType(final String type) { + return addMetadataValue(MetadataSchemaEnum.DC.getName(),"type", null, type); + } + public WorkspaceItemBuilder grantLicense() { Item item = workspaceItem.getItem(); String license; diff --git a/dspace-api/src/test/java/org/dspace/builder/util/AbstractBuilderCleanupUtil.java b/dspace-api/src/test/java/org/dspace/builder/util/AbstractBuilderCleanupUtil.java index 82555c2d378a..6a8daa432eb6 100644 --- a/dspace-api/src/test/java/org/dspace/builder/util/AbstractBuilderCleanupUtil.java +++ b/dspace-api/src/test/java/org/dspace/builder/util/AbstractBuilderCleanupUtil.java @@ -7,8 +7,8 @@ */ package org.dspace.builder.util; +import java.util.ArrayList; import java.util.LinkedHashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -25,10 +25,14 @@ import org.dspace.builder.ItemBuilder; import org.dspace.builder.MetadataFieldBuilder; import org.dspace.builder.MetadataSchemaBuilder; +import org.dspace.builder.OrcidHistoryBuilder; +import org.dspace.builder.OrcidQueueBuilder; +import org.dspace.builder.OrcidTokenBuilder; import org.dspace.builder.PoolTaskBuilder; import 
org.dspace.builder.ProcessBuilder; import org.dspace.builder.RelationshipBuilder; import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.builder.RequestItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.builder.SiteBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -37,7 +41,7 @@ /** * This class will ensure that all the builders that are registered will be cleaned up in the order as defined * in the constructor. This will ensure foreign-key constraint safe deletion of the objects made with these - * builders + * builders. */ public class AbstractBuilderCleanupUtil { @@ -55,26 +59,30 @@ public AbstractBuilderCleanupUtil() { } private void initMap() { - map.put(ResourcePolicyBuilder.class.getName(), new LinkedList<>()); - map.put(RelationshipBuilder.class.getName(), new LinkedList<>()); - map.put(RelationshipTypeBuilder.class.getName(), new LinkedList<>()); - map.put(EntityTypeBuilder.class.getName(), new LinkedList<>()); - map.put(PoolTaskBuilder.class.getName(), new LinkedList<>()); - map.put(WorkflowItemBuilder.class.getName(), new LinkedList<>()); - map.put(WorkspaceItemBuilder.class.getName(), new LinkedList<>()); - map.put(BitstreamBuilder.class.getName(), new LinkedList<>()); - map.put(BitstreamFormatBuilder.class.getName(), new LinkedList<>()); - map.put(ClaimedTaskBuilder.class.getName(), new LinkedList<>()); - map.put(CollectionBuilder.class.getName(), new LinkedList<>()); - map.put(CommunityBuilder.class.getName(), new LinkedList<>()); - map.put(EPersonBuilder.class.getName(), new LinkedList<>()); - map.put(GroupBuilder.class.getName(), new LinkedList<>()); - map.put(BundleBuilder.class.getName(), new LinkedList<>()); - map.put(ItemBuilder.class.getName(), new LinkedList<>()); - map.put(MetadataFieldBuilder.class.getName(), new LinkedList<>()); - map.put(MetadataSchemaBuilder.class.getName(), new LinkedList<>()); - map.put(SiteBuilder.class.getName(), new LinkedList<>()); - 
map.put(ProcessBuilder.class.getName(), new LinkedList<>()); + map.put(OrcidQueueBuilder.class.getName(), new ArrayList<>()); + map.put(OrcidHistoryBuilder.class.getName(), new ArrayList<>()); + map.put(OrcidTokenBuilder.class.getName(), new ArrayList<>()); + map.put(ResourcePolicyBuilder.class.getName(), new ArrayList<>()); + map.put(RelationshipBuilder.class.getName(), new ArrayList<>()); + map.put(RequestItemBuilder.class.getName(), new ArrayList<>()); + map.put(PoolTaskBuilder.class.getName(), new ArrayList<>()); + map.put(WorkflowItemBuilder.class.getName(), new ArrayList<>()); + map.put(WorkspaceItemBuilder.class.getName(), new ArrayList<>()); + map.put(BitstreamBuilder.class.getName(), new ArrayList<>()); + map.put(BitstreamFormatBuilder.class.getName(), new ArrayList<>()); + map.put(ClaimedTaskBuilder.class.getName(), new ArrayList<>()); + map.put(BundleBuilder.class.getName(), new ArrayList<>()); + map.put(ItemBuilder.class.getName(), new ArrayList<>()); + map.put(CollectionBuilder.class.getName(), new ArrayList<>()); + map.put(CommunityBuilder.class.getName(), new ArrayList<>()); + map.put(GroupBuilder.class.getName(), new ArrayList<>()); + map.put(EPersonBuilder.class.getName(), new ArrayList<>()); + map.put(RelationshipTypeBuilder.class.getName(), new ArrayList<>()); + map.put(EntityTypeBuilder.class.getName(), new ArrayList<>()); + map.put(MetadataFieldBuilder.class.getName(), new ArrayList<>()); + map.put(MetadataSchemaBuilder.class.getName(), new ArrayList<>()); + map.put(SiteBuilder.class.getName(), new ArrayList<>()); + map.put(ProcessBuilder.class.getName(), new ArrayList<>()); } /** @@ -84,7 +92,7 @@ private void initMap() { * @param abstractBuilder The AbstractBuilder to be added */ public void addToMap(AbstractBuilder abstractBuilder) { - map.computeIfAbsent(abstractBuilder.getClass().getName(), k -> new LinkedList<>()).add(abstractBuilder); + map.computeIfAbsent(abstractBuilder.getClass().getName(), k -> new 
ArrayList<>()).add(abstractBuilder); } /** diff --git a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java index 921e4efcc7d8..e85a0fc7b78d 100644 --- a/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BitstreamTest.java @@ -432,6 +432,51 @@ public void testDeleteAndExpunge() throws IOException, SQLException, AuthorizeEx assertThat("testExpunge 0", bitstreamService.find(context, bitstreamId), nullValue()); } + /** + * Test of delete method, of class Bitstream. + */ + @Test + public void testDeleteBitstreamAndUnsetPrimaryBitstreamID() + throws IOException, SQLException, AuthorizeException { + + context.turnOffAuthorisationSystem(); + + Community owningCommunity = communityService.create(null, context); + Collection collection = collectionService.create(context, owningCommunity); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + Item item = installItemService.installItem(context, workspaceItem); + Bundle b = bundleService.create(context, item, "TESTBUNDLE"); + + // Allow Bundle REMOVE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); + // Allow Bitstream WRITE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); + // Allow Bitstream DELETE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE)); + + //set a value different than default + File f = new File(testProps.get("test.bitstream").toString()); + + // Create a new bitstream, which we can delete. 
+ Bitstream delBS = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, delBS); + // set primary bitstream + b.setPrimaryBitstreamID(delBS); + context.restoreAuthSystemState(); + + // Test that delete will flag the bitstream as deleted + assertFalse("testDeleteBitstreamAndUnsetPrimaryBitstreamID 0", delBS.isDeleted()); + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(delBS)); + // Delete bitstream + bitstreamService.delete(context, delBS); + assertTrue("testDeleteBitstreamAndUnsetPrimaryBitstreamID 2", delBS.isDeleted()); + + // Now test if the primary bitstream was unset from bundle + assertThat("testDeleteBitstreamAndUnsetPrimaryBitstreamID 3", b.getPrimaryBitstream(), equalTo(null)); + } + /** * Test of retrieve method, of class Bitstream. */ diff --git a/dspace-api/src/test/java/org/dspace/content/BundleTest.java b/dspace-api/src/test/java/org/dspace/content/BundleTest.java index 4ff35f5b4df8..4af64b81cb0c 100644 --- a/dspace-api/src/test/java/org/dspace/content/BundleTest.java +++ b/dspace-api/src/test/java/org/dspace/content/BundleTest.java @@ -513,6 +513,41 @@ public void testRemoveBitstreamAuth() throws SQLException, AuthorizeException, I } + /** + * Test removeBitstream method and also the unsetPrimaryBitstreamID method, of class Bundle. 
+ */ + @Test + public void testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID() + throws IOException, SQLException, AuthorizeException { + // Allow Item WRITE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); + // Allow Bundle ADD permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.ADD); + // Allow Bundle REMOVE permissions + doNothing().when(authorizeServiceSpy).authorizeAction(context, b, Constants.REMOVE); + // Allow Bitstream WRITE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.WRITE)); + // Allow Bitstream DELETE permissions + doNothing().when(authorizeServiceSpy) + .authorizeAction(any(Context.class), any(Bitstream.class), eq(Constants.DELETE)); + + + context.turnOffAuthorisationSystem(); + //set a value different than default + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bs = bitstreamService.create(context, new FileInputStream(f)); + bundleService.addBitstream(context, b, bs); + b.setPrimaryBitstreamID(bs); + context.restoreAuthSystemState(); + + assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 0", b.getPrimaryBitstream(), equalTo(bs)); + //remove bitstream + bundleService.removeBitstream(context, b, bs); + //is -1 when not set + assertThat("testRemoveBitstreamAuthAndUnsetPrimaryBitstreamID 1", b.getPrimaryBitstream(), equalTo(null)); + } + /** * Test of update method, of class Bundle. 
*/ diff --git a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java index 1548ebcae0d8..a177571ffa46 100644 --- a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java +++ b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java @@ -725,9 +725,6 @@ public void testRemoveItemAuth() throws Exception { // Allow Item REMOVE perms doNothing().when(authorizeServiceSpy) .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); - // Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion) - doNothing().when(authorizeServiceSpy) - .authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE)); // create & add item first context.turnOffAuthorisationSystem(); @@ -1203,4 +1200,71 @@ public void testGetParentObject() throws SQLException { equalTo(owningCommunity)); } + /** + * Test of retrieveCollectionWithSubmitByEntityType method getting the closest + * collection of non-item type starting from an item + */ + @Test + public void testRetrieveCollectionWithSubmitByEntityType() throws SQLException, AuthorizeException { + context.setDispatcher("default"); + context.turnOffAuthorisationSystem(); + Community com = communityService.create(null, context); + Group submitters = groupService.create(context); + Collection collection = collectionService.create(context, com); + collectionService.addMetadata(context, collection, "dspace", "entity", "type", + null, "Publication"); + com.addCollection(collection); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + Item item = installItemService.installItem(context, workspaceItem); + EPerson epersonA = ePersonService.create(context); + Collection collectionPerson = collectionService.create(context, com); + collectionService.addMetadata(context, collectionPerson, "dspace", "entity", "type", + null, "Person"); + 
collectionPerson.setSubmitters(submitters); + groupService.addMember(context, submitters, epersonA); + context.setCurrentUser(epersonA); + context.commit(); + context.restoreAuthSystemState(); + Collection resultCollection = collectionService.retrieveCollectionWithSubmitByEntityType + (context, item, "Person"); + + assertThat("testRetrieveCollectionWithSubmitByEntityType 0", resultCollection, notNullValue()); + assertThat("testRetrieveCollectionWithSubmitByEntityType 1", resultCollection, equalTo(collectionPerson)); + + context.setDispatcher("exclude-discovery"); + } + + /** + * Test of rretrieveCollectionWithSubmitByCommunityAndEntityType method getting the closest + * collection of non-community type starting from an community + */ + @Test + public void testRetrieveCollectionWithSubmitByCommunityAndEntityType() throws SQLException, AuthorizeException { + context.setDispatcher("default"); + context.turnOffAuthorisationSystem(); + Community com = communityService.create(null, context); + Group submitters = groupService.create(context); + Collection collection = collectionService.create(context, com); + collectionService.addMetadata(context, collection, "dspace", "entity", "type", + null, "Publication"); + com.addCollection(collection); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + Item item = installItemService.installItem(context, workspaceItem); + EPerson epersonA = ePersonService.create(context); + Collection collectionPerson = collectionService.create(context, com); + collectionService.addMetadata(context, collectionPerson, "dspace", "entity", "type", + null, "Person"); + collectionPerson.setSubmitters(submitters); + groupService.addMember(context, submitters, epersonA); + context.setCurrentUser(epersonA); + context.commit(); + context.restoreAuthSystemState(); + Collection resultCollection = collectionService.retrieveCollectionWithSubmitByCommunityAndEntityType + (context, com, "Person"); + + 
assertThat("testRetrieveCollectionWithSubmitByEntityType 0", resultCollection, notNullValue()); + assertThat("testRetrieveCollectionWithSubmitByEntityType 1", resultCollection, equalTo(collectionPerson)); + + context.setDispatcher("exclude-discovery"); + } } diff --git a/dspace-api/src/test/java/org/dspace/content/InstallItemTest.java b/dspace-api/src/test/java/org/dspace/content/InstallItemTest.java index b40e46c18074..60c1bc713d2c 100644 --- a/dspace-api/src/test/java/org/dspace/content/InstallItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/InstallItemTest.java @@ -11,8 +11,6 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.when; import java.io.File; import java.io.FileInputStream; @@ -26,7 +24,6 @@ import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; @@ -38,7 +35,6 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; -import org.springframework.test.util.ReflectionTestUtils; /** * Unit Tests for class InstallItem @@ -57,12 +53,6 @@ public class InstallItemTest extends AbstractUnitTest { private Collection collection; private Community owningCommunity; - /** - * Spy of AuthorizeService to use for tests - * (initialized / setup in @Before method) - */ - private AuthorizeService authorizeServiceSpy; - /** * log4j category */ @@ -84,14 +74,6 @@ public void init() { this.owningCommunity = communityService.create(null, context); this.collection = collectionService.create(context, owningCommunity); context.restoreAuthSystemState(); - - // Initialize our spy of the autowired 
(global) authorizeService bean. - // This allows us to customize the bean's method return values in tests below - authorizeServiceSpy = spy(authorizeService); - // "Wire" our spy to be used by the current loaded workspaceItemService and collectionService - // (To ensure it uses the spy instead of the real service) - ReflectionTestUtils.setField(workspaceItemService, "authorizeService", authorizeServiceSpy); - ReflectionTestUtils.setField(collectionService, "authorizeService", authorizeServiceSpy); } catch (SQLException | AuthorizeException ex) { log.error("SQL Error in init", ex); fail("SQL Error in init: " + ex.getMessage()); @@ -154,23 +136,23 @@ public void testInstallItem_validHandle() throws Exception { /** * Test of installItem method (with an invalid handle), of class InstallItem. */ - @Test(expected = AuthorizeException.class) + @Test(expected = IllegalStateException.class) public void testInstallItem_invalidHandle() throws Exception { - // Allow full Admin rights - when(authorizeServiceSpy.isAdmin(context)).thenReturn(true); - // create two items for tests context.turnOffAuthorisationSystem(); - WorkspaceItem is = workspaceItemService.create(context, collection, false); - WorkspaceItem is2 = workspaceItemService.create(context, collection, false); - context.restoreAuthSystemState(); + try { + WorkspaceItem is = workspaceItemService.create(context, collection, false); + WorkspaceItem is2 = workspaceItemService.create(context, collection, false); - //Test assigning the same Handle to two different items - String handle = "123456789/56789"; - installItemService.installItem(context, is, handle); + //Test assigning the same Handle to two different items + String handle = "123456789/56789"; + installItemService.installItem(context, is, handle); - // Assigning the same handle again should throw a RuntimeException - installItemService.installItem(context, is2, handle); + // Assigning the same handle again should throw a RuntimeException + 
installItemService.installItem(context, is2, handle); + } finally { + context.restoreAuthSystemState(); + } fail("Exception expected"); } diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 6af1cd5e026d..d440597ec416 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -19,6 +19,8 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.File; @@ -41,6 +43,7 @@ import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.BitstreamFormatService; +import org.dspace.content.service.ItemService; import org.dspace.content.service.MetadataFieldService; import org.dspace.content.service.MetadataSchemaService; import org.dspace.core.Constants; @@ -679,7 +682,7 @@ public void testAddMetadata_7args_2_noauthority() throws SQLException { String schema = "dc"; String element = "contributor"; - String qualifier = "author"; + String qualifier = "editor"; String lang = Item.ANY; String values = "value0"; String authorities = "auth0"; @@ -1186,8 +1189,6 @@ public void testDeleteAuth() throws Exception { doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); // Allow Item DELETE perms doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); - // Allow Item WRITE perms (required to first delete identifiers) - doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); UUID id = item.getID(); itemService.delete(context, item); @@ -1392,6 +1393,78 @@ public void testInheritCollectionDefaultPolicies() throws Exception { 
assertTrue("testInheritCollectionDefaultPolicies 2", equals); } + // Test to verify DEFAULT_*_READ policies on collection inherit properly to Item/Bundle/Bitstream + @Test + public void testInheritCollectionDefaultPolicies_custom_default_groups() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new collection + Collection c = createCollection(); + // Create a custom group with DEFAULT_ITEM_READ privileges in this Collection + Group item_read_role = collectionService.createDefaultReadGroup(context, c, "ITEM", + Constants.DEFAULT_ITEM_READ); + // Create a custom group with DEFAULT_BITSTREAM_READ privileges in this Collection + Group bitstream_read_role = collectionService.createDefaultReadGroup(context, c, "BITSTREAM", + Constants.DEFAULT_BITSTREAM_READ); + context.restoreAuthSystemState(); + + // Verify that Collection's DEFAULT_ITEM_READ now uses the newly created group. + List defaultItemReadPolicies = + authorizeService.getPoliciesActionFilter(context, c, Constants.DEFAULT_ITEM_READ); + assertEquals("One DEFAULT_ITEM_READ policy", 1, defaultItemReadPolicies.size()); + assertEquals("DEFAULT_ITEM_READ group", item_read_role.getName(), + defaultItemReadPolicies.get(0).getGroup().getName()); + + // Verify that Collection's DEFAULT_BITSTREAM_READ now uses the newly created group. 
+ List defaultBitstreamReadPolicies = + authorizeService.getPoliciesActionFilter(context, c, Constants.DEFAULT_BITSTREAM_READ); + assertEquals("One DEFAULT_BITSTREAM_READ policy on Collection", 1, defaultBitstreamReadPolicies.size()); + assertEquals("DEFAULT_BITSTREAM_READ group", bitstream_read_role.getName(), + defaultBitstreamReadPolicies.get(0).getGroup().getName()); + + context.turnOffAuthorisationSystem(); + // Create a new Item in this Collection + WorkspaceItem workspaceItem = workspaceItemService.create(context, c, false); + Item item = workspaceItem.getItem(); + // Add a single Bitstream to the ORIGINAL bundle + File f = new File(testProps.get("test.bitstream").toString()); + Bitstream bitstream = itemService.createSingleBitstream(context, new FileInputStream(f), item); + context.restoreAuthSystemState(); + + // Allow Item WRITE perms + doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE, true); + // Inherit all default policies from Collection down to new Item + itemService.inheritCollectionDefaultPolicies(context, item, c); + + // Verify Item inherits DEFAULT_ITEM_READ group from Collection + List itemReadPolicies = authorizeService.getPoliciesActionFilter(context, item, Constants.READ); + assertEquals("One READ policy on Item", 1, itemReadPolicies.size()); + assertEquals("Item's READ group", item_read_role.getName(), + itemReadPolicies.get(0).getGroup().getName()); + + // Verify Bitstream inherits DEFAULT_BITSTREAM_READ group from Collection + List bitstreamReadPolicies = authorizeService.getPoliciesActionFilter(context, bitstream, + Constants.READ); + assertEquals("One READ policy on Bitstream", 1, bitstreamReadPolicies.size()); + assertEquals("Bitstream's READ group", bitstream_read_role.getName(), + bitstreamReadPolicies.get(0).getGroup().getName()); + + // Verify ORIGINAL Bundle inherits DEFAULT_ITEM_READ group from Collection + // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the 
files + // can be listed (even if files are access restricted or embargoed) + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + Bundle originalBundle = bundles.get(0); + List bundleReadPolicies = authorizeService.getPoliciesActionFilter(context, originalBundle, + Constants.READ); + assertEquals("One READ policy on Bundle", 1, bundleReadPolicies.size()); + assertEquals("Bundles's READ group", item_read_role.getName(), + bundleReadPolicies.get(0).getGroup().getName()); + + // Cleanup after ourselves. Delete created collection & all content under it + context.turnOffAuthorisationSystem(); + collectionService.delete(context, c); + context.restoreAuthSystemState(); + } + /** * Test of move method, of class Item. */ @@ -1410,6 +1483,27 @@ public void testMove() throws Exception { assertThat("testMove 1", it.getOwningCollection(), equalTo(to)); } + /** + * Test of move method, of class Item, where both Collections are the same. + */ + @Test + public void testMoveSameCollection() throws Exception { + context.turnOffAuthorisationSystem(); + while (it.getCollections().size() > 1) { + it.removeCollection(it.getCollections().get(0)); + } + + Collection collection = it.getCollections().get(0); + it.setOwningCollection(collection); + ItemService itemServiceSpy = spy(itemService); + + itemService.move(context, it, collection, collection); + context.restoreAuthSystemState(); + assertThat("testMoveSameCollection 0", it.getOwningCollection(), notNullValue()); + assertThat("testMoveSameCollection 1", it.getOwningCollection(), equalTo(collection)); + verify(itemServiceSpy, times(0)).delete(context, it); + } + /** * Test of hasUploadedFiles method, of class Item. 
*/ diff --git a/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java b/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java index f8b836ca517a..1ba2bc73a53e 100644 --- a/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/LeftTiltedRelationshipMetadataServiceIT.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import java.util.List; @@ -33,8 +34,8 @@ protected void initPublicationAuthor() throws Exception { EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Author").build(); - leftItem = ItemBuilder.createItem(context, col).withEntityType("Publication").build(); - rightItem = ItemBuilder.createItem(context, col).withEntityType("Author") + leftItem = ItemBuilder.createItem(context, col).build(); + rightItem = ItemBuilder.createItem(context, col2) .withPersonIdentifierLastName("familyName") .withPersonIdentifierFirstName("firstName").build(); isAuthorOfPublicationRelationshipType = @@ -71,19 +72,28 @@ public void testGetAuthorRelationshipMetadata() throws Exception { //request the virtual metadata of the publication only List leftList = relationshipMetadataService .getRelationshipMetadata(leftItem, true); - assertThat(leftList.size(), equalTo(2)); - assertThat(leftList.get(0).getValue(), equalTo("familyName, firstName")); - assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); - assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("contributor")); - assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("author")); - 
assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(leftList.size(), equalTo(3)); - assertThat(leftList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); - assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), + assertThat(leftList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); + assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(leftList.get(1).getValue(), equalTo("familyName, firstName")); + assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); + assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("contributor")); + assertThat(leftList.get(1).getMetadataField().getQualifier(), equalTo("author")); assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(leftList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(leftList.get(2).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(leftList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); + // rightItem is the author List rightRelationshipMetadataList = itemService .getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY); diff 
--git a/dspace-api/src/test/java/org/dspace/content/MetadataDSpaceCsvExportServiceImplIT.java b/dspace-api/src/test/java/org/dspace/content/MetadataDSpaceCsvExportServiceImplIT.java new file mode 100644 index 000000000000..c2d4f56ca61a --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/MetadataDSpaceCsvExportServiceImplIT.java @@ -0,0 +1,115 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.util.Iterator; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.bulkedit.DSpaceCSV; +import org.dspace.app.bulkedit.DSpaceCSVLine; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.service.MetadataDSpaceCsvExportService; +import org.dspace.scripts.handler.DSpaceRunnableHandler; +import org.dspace.utils.DSpace; +import org.junit.Ignore; +import org.junit.Test; + +/** + * + * @author Mark H. Wood + */ +public class MetadataDSpaceCsvExportServiceImplIT + extends AbstractIntegrationTestWithDatabase { + /** + * Test of handleExport method, of class MetadataDSpaceCsvExportServiceImpl. + * @throws java.lang.Exception passed through. 
+ */ + @Ignore + @Test + public void testHandleExport() + throws Exception { + System.out.println("handleExport"); + boolean exportAllItems = false; + boolean exportAllMetadata = false; + String identifier = ""; + DSpaceRunnableHandler handler = null; + MetadataDSpaceCsvExportServiceImpl instance = new MetadataDSpaceCsvExportServiceImpl(); + DSpaceCSV expResult = null; + DSpaceCSV result = instance.handleExport(context, exportAllItems, + exportAllMetadata, identifier, handler); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + fail("The test case is a prototype."); + } + + /** + * Test of export method, of class MetadataDSpaceCsvExportServiceImpl. + * @throws java.lang.Exception passed through. + */ + @Ignore + @Test + public void testExport_3args_1() + throws Exception { + System.out.println("export"); + Iterator toExport = null; + boolean exportAll = false; + MetadataDSpaceCsvExportServiceImpl instance = new MetadataDSpaceCsvExportServiceImpl(); + DSpaceCSV expResult = null; + DSpaceCSV result = instance.export(context, toExport, exportAll); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + fail("The test case is a prototype."); + } + + /** + * Test of export with mapped Item. + * @throws java.lang.Exception passed through. + */ + @Test + public void testMappedItem() + throws Exception { + System.out.println("export"); + + // Create some content with which to test. 
+ context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Community") + .build(); + Collection collection1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection1") + .build(); + Collection collection2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection2") + .build(); + context.setCurrentUser(eperson); + Item item = ItemBuilder.createItem(context, collection1) + .withTitle("Item") + .withIssueDate("1957") + .build(); + item.addCollection(collection2); + context.restoreAuthSystemState(); + + // Test! + MetadataDSpaceCsvExportService instance = new DSpace() + .getServiceManager() + .getServiceByName(MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(), + MetadataDSpaceCsvExportService.class); + DSpaceCSV result = instance.export(context, parentCommunity, false); + + // Examine the result. + List csvLines = result.getCSVLines(); + assertEquals("One item mapped twice should produce one line", + 1, csvLines.size()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/content/MetadataFieldNameTest.java b/dspace-api/src/test/java/org/dspace/content/MetadataFieldNameTest.java index f0ca7d25d8ae..60b43479c767 100644 --- a/dspace-api/src/test/java/org/dspace/content/MetadataFieldNameTest.java +++ b/dspace-api/src/test/java/org/dspace/content/MetadataFieldNameTest.java @@ -26,22 +26,23 @@ public MetadataFieldNameTest() { @Test public void testConstruct3() { MetadataFieldName instance = new MetadataFieldName("one", "two", "three"); - assertEquals("Incorrect schema", "one", instance.SCHEMA); - assertEquals("Incorrect element", "two", instance.ELEMENT); - assertEquals("Incorrect qualifier", "three", instance.QUALIFIER); + assertEquals("Incorrect schema", "one", instance.schema); + assertEquals("Incorrect element", "two", instance.element); + assertEquals("Incorrect qualifier", "three", instance.qualifier); } @Test public void testConstruct2() { 
MetadataFieldName instance = new MetadataFieldName("one", "two"); - assertEquals("Incorrect schema", "one", instance.SCHEMA); - assertEquals("Incorrect element", "two", instance.ELEMENT); - assertNull("Incorrect qualifier", instance.QUALIFIER); + assertEquals("Incorrect schema", "one", instance.schema); + assertEquals("Incorrect element", "two", instance.element); + assertNull("Incorrect qualifier", instance.qualifier); } @Test(expected = NullPointerException.class) + @SuppressWarnings("ResultOfObjectAllocationIgnored") public void testConstructNull() { - MetadataFieldName instance = new MetadataFieldName("one", null); + new MetadataFieldName("one", null); } /** @@ -71,7 +72,7 @@ public void TestParse2() { */ @Test(expected = IllegalArgumentException.class) public void TestParse1() { - String[] results = MetadataFieldName.parse("one"); + MetadataFieldName.parse("one"); } /** @@ -79,15 +80,16 @@ public void TestParse1() { */ @Test(expected = IllegalArgumentException.class) public void TestParse0() { - String[] results = MetadataFieldName.parse(""); + MetadataFieldName.parse(""); } /** * Test of parse method using an illegal null name. */ @Test(expected = NullPointerException.class) + @SuppressWarnings("null") public void TestParseNull() { - String[] results = MetadataFieldName.parse(null); + MetadataFieldName.parse(null); } /** diff --git a/dspace-api/src/test/java/org/dspace/content/MetadataFieldPerformanceTest.java b/dspace-api/src/test/java/org/dspace/content/MetadataFieldPerformanceTest.java index d9b216f638cb..fefddbf3d9db 100644 --- a/dspace-api/src/test/java/org/dspace/content/MetadataFieldPerformanceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/MetadataFieldPerformanceTest.java @@ -50,7 +50,7 @@ public void testManyQueries() throws SQLException { Assert.assertTrue("Duration (" + duration + ") should be smaller than " + maxDuration + " for " + amount + " tests." 
+ " Max of " + maxDurationPerCall + " ms per operation exceeded: " + - (((double) (duration)) / amount) + " ms.", duration < maxDuration); + (((double) duration) / amount) + " ms.", duration < maxDuration); } @Test @@ -74,14 +74,14 @@ public void testManyMetadataAdds() throws SQLException, AuthorizeException { long duration = (endTime - startTime); - double maxDurationPerCall = .3; + double maxDurationPerCall = .4; double maxDuration = maxDurationPerCall * amount; //Duration is 1.542 without performance improvements //Duration is 0.0538 with performance improvements Assert.assertTrue("Duration (" + duration + ") should be smaller than " + maxDuration + " for " + amount + " tests." + " Max of " + maxDurationPerCall + " ms per operation exceeded: " + - (((double) (duration)) / amount) + " ms.", duration < maxDuration); + (((double) duration) / amount) + " ms.", duration < maxDuration); context.turnOffAuthorisationSystem(); // Delete community & collection created in init() diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java b/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java index 8ade8ca1f648..b0761946fe35 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipMetadataServiceIT.java @@ -8,6 +8,7 @@ package org.dspace.content; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; @@ -56,6 +57,7 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa Item leftItem; Item rightItem; Collection col; + Collection col2; Relationship relationship; RelationshipType isAuthorOfPublicationRelationshipType; @@ -75,10 +77,15 @@ public void setUp() throws Exception { context.turnOffAuthorisationSystem(); 
Community community = CommunityBuilder.createCommunity(context).build(); - col = CollectionBuilder.createCollection(context, community).build(); + col = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + col2 = CollectionBuilder.createCollection(context, community) + .withEntityType("Author") + .build(); leftItem = ItemBuilder.createItem(context, col).build(); - rightItem = ItemBuilder.createItem(context, col).build(); + rightItem = ItemBuilder.createItem(context, col2).build(); context.restoreAuthSystemState(); } @@ -90,8 +97,8 @@ protected void initPublicationAuthor() throws Exception { context.turnOffAuthorisationSystem(); EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Author").build(); - leftItem = ItemBuilder.createItem(context, col).withEntityType("Publication").build(); - rightItem = ItemBuilder.createItem(context, col).withEntityType("Author") + leftItem = ItemBuilder.createItem(context, col).build(); + rightItem = ItemBuilder.createItem(context, col2) .withPersonIdentifierLastName("familyName") .withPersonIdentifierFirstName("firstName").build(); isAuthorOfPublicationRelationshipType = @@ -114,8 +121,8 @@ private void initPublicationAuthorWithCopyParams(boolean copyToLeft, boolean cop context.turnOffAuthorisationSystem(); EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); EntityType authorEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Author").build(); - leftItem = ItemBuilder.createItem(context, col).withEntityType("Publication").build(); - rightItem = ItemBuilder.createItem(context, col).withEntityType("Author") + leftItem = ItemBuilder.createItem(context, col).build(); + rightItem = ItemBuilder.createItem(context, col2) .withPersonIdentifierLastName("familyName") 
.withPersonIdentifierFirstName("firstName").build(); RelationshipType isAuthorOfPublication = @@ -135,12 +142,21 @@ private void initPublicationAuthorWithCopyParams(boolean copyToLeft, boolean cop */ protected void initJournalVolumeIssue() throws Exception { context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection col = CollectionBuilder.createCollection(context, community) + .withEntityType("JournalIssue") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, community) + .withEntityType("JournalVolume") + .build(); + EntityType journalIssueEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalIssue").build(); EntityType publicationVolumeEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalVolume").build(); - leftItem = ItemBuilder.createItem(context, col).withEntityType("JournalIssue") + leftItem = ItemBuilder.createItem(context, col) .withPublicationIssueNumber("2").build(); - rightItem = ItemBuilder.createItem(context, col).withEntityType("JournalVolume") + rightItem = ItemBuilder.createItem(context, col2) .withPublicationVolumeNumber("30").build(); RelationshipType isIssueOfVolume = RelationshipTypeBuilder @@ -171,19 +187,28 @@ public void testGetAuthorRelationshipMetadata() throws Exception { //request the virtual metadata of the publication only List leftList = relationshipMetadataService .getRelationshipMetadata(leftItem, true); - assertThat(leftList.size(), equalTo(2)); - assertThat(leftList.get(0).getValue(), equalTo("familyName, firstName")); - assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); - assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("contributor")); - assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("author")); - assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(leftList.size(), 
equalTo(3)); - assertThat(leftList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); - assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), + assertThat(leftList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); + assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(leftList.get(1).getValue(), equalTo("familyName, firstName")); + assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); + assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("contributor")); + assertThat(leftList.get(1).getMetadataField().getQualifier(), equalTo("author")); assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(leftList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(leftList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(leftList.get(2).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); + assertThat(leftList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(leftList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); + // rightItem is the author List rightRelationshipMetadataList = itemService .getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY); @@ -193,12 +218,21 @@ public void testGetAuthorRelationshipMetadata() throws Exception { //request the virtual metadata of the 
publication List rightList = relationshipMetadataService .getRelationshipMetadata(rightItem, true); - assertThat(rightList.size(), equalTo(1)); + assertThat(rightList.size(), equalTo(2)); + assertThat(rightList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); assertThat(rightList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); assertThat(rightList.get(0).getMetadataField().getElement(), equalTo("isPublicationOfAuthor")); + assertThat(rightList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); assertThat(rightList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(rightList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(rightList.get(1).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(rightList.get(1).getMetadataField().getElement(), equalTo("isPublicationOfAuthor")); + assertThat(rightList.get(1).getMetadataField().getQualifier(), nullValue()); + assertThat(rightList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); } @Test @@ -365,34 +399,52 @@ public void testGetJournalRelationshipMetadata() throws Exception { //request the virtual metadata of the journal issue List issueRelList = relationshipMetadataService.getRelationshipMetadata(leftItem, true); - assertThat(issueRelList.size(), equalTo(2)); - assertThat(issueRelList.get(0).getValue(), equalTo("30")); - assertThat(issueRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationvolume")); - assertThat(issueRelList.get(0).getMetadataField().getElement(), equalTo("volumeNumber")); - assertThat(issueRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); - assertThat(issueRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(issueRelList.size(), equalTo(3)); - assertThat(issueRelList.get(1).getValue(), 
equalTo(String.valueOf(rightItem.getID()))); - assertThat(issueRelList.get(1).getMetadataField().getMetadataSchema().getName(), + assertThat(issueRelList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(issueRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(issueRelList.get(1).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); + assertThat(issueRelList.get(0).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); + assertThat(issueRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); + assertThat(issueRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(issueRelList.get(1).getValue(), equalTo("30")); + assertThat(issueRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationvolume")); + assertThat(issueRelList.get(1).getMetadataField().getElement(), equalTo("volumeNumber")); + assertThat(issueRelList.get(1).getMetadataField().getQualifier(), equalTo(null)); assertThat(issueRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(issueRelList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID()))); + assertThat(issueRelList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(issueRelList.get(2).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); + assertThat(issueRelList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(issueRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); + //request the virtual metadata of the journal volume List volumeRelList = relationshipMetadataService.getRelationshipMetadata(rightItem, true); - assertThat(volumeRelList.size(), equalTo(2)); - assertThat(volumeRelList.get(0).getValue(), equalTo("2")); - 
assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); - assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("issueNumber")); - assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); - assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(volumeRelList.size(), equalTo(3)); - assertThat(volumeRelList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); - assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), + assertThat(volumeRelList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); + assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(volumeRelList.get(1).getValue(), equalTo("2")); + assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); + assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("issueNumber")); + assertThat(volumeRelList.get(1).getMetadataField().getQualifier(), equalTo(null)); assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(volumeRelList.get(2).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(volumeRelList.get(2).getMetadataField().getElement(), 
equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(2).getMetadataField().getQualifier(), nullValue()); + assertThat(volumeRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); } @Test @@ -600,53 +652,26 @@ public void testDeleteAuthorRelationshipCopyToBothItemsFromDefaultsInDb() throws } @Test - public void testGetNextRightPlace() throws Exception { - assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(0)); - initPublicationAuthor(); - - assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(1)); - + public void testGetVirtualMetadata() throws SQLException, AuthorizeException { + // Journal, JournalVolume, JournalIssue, Publication items, related to each other using the relationship types + // isJournalOfVolume, isJournalVolumeOfIssue, isJournalIssueOfPublication. context.turnOffAuthorisationSystem(); - Community community = CommunityBuilder.createCommunity(context).build(); - Collection col = CollectionBuilder.createCollection(context, community).build(); - Item secondItem = ItemBuilder.createItem(context, col).withEntityType("Publication").build(); - RelationshipBuilder.createRelationshipBuilder(context, secondItem, rightItem, - isAuthorOfPublicationRelationshipType).build(); - context.restoreAuthSystemState(); - - assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(2)); - } - - @Test - public void testGetNextLeftPlace() throws Exception { - assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(0)); - initPublicationAuthor(); - - assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(1)); - - context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection col = CollectionBuilder.createCollection(context, community).build(); - Item secondAuthor = ItemBuilder.createItem(context, 
col).withEntityType("Author") - .withPersonIdentifierFirstName("firstName") - .withPersonIdentifierLastName("familyName").build(); + Collection col = CollectionBuilder.createCollection(context, community) + .withEntityType("JournalIssue") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, community) + .withEntityType("JournalVolume") + .build(); + Collection col3 = CollectionBuilder.createCollection(context, community) + .withEntityType("Journal") + .build(); + Collection col4 = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); - RelationshipBuilder.createRelationshipBuilder(context, leftItem, secondAuthor, - isAuthorOfPublicationRelationshipType).build(); - context.restoreAuthSystemState(); - - assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(2)); - - - } - - @Test - public void testGetVirtualMetadata() throws SQLException, AuthorizeException { - // Journal, JournalVolume, JournalIssue, Publication items, related to each other using the relationship types - // isJournalOfVolume, isJournalVolumeOfIssue, isJournalIssueOfPublication. 
- context.turnOffAuthorisationSystem(); EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); EntityType journalIssueEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalIssue").build(); EntityType journalVolumeEntityType = @@ -666,24 +691,21 @@ public void testGetVirtualMetadata() throws SQLException, AuthorizeException { null) .build(); - Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); - - Item journalIssue = ItemBuilder.createItem(context, collection).withEntityType("JournalIssue").build(); - Item journalVolume = ItemBuilder.createItem(context, collection) + Item journalIssue = ItemBuilder.createItem(context, col).build(); + Item journalVolume = ItemBuilder.createItem(context, col2) .withPublicationVolumeNumber("30") - .withEntityType("JournalVolume").build(); - Item journal = ItemBuilder.createItem(context, collection) + .build(); + Item journal = ItemBuilder.createItem(context, col3) .withMetadata("creativeworkseries", "issn", null, "issn journal") - .withEntityType("Journal").build(); + .build(); RelationshipBuilder.createRelationshipBuilder(context, journalIssue, journalVolume, isJournalVolumeOfIssueRelationshipType).build(); RelationshipBuilder.createRelationshipBuilder(context, journalVolume, journal, isJournalVolumeOfJournalRelationshipType).build(); - Item publication = ItemBuilder.createItem(context, collection) + Item publication = ItemBuilder.createItem(context, col4) .withTitle("Pub 1") - .withEntityType("Publication").build(); + .build(); RelationshipBuilder.createRelationshipBuilder(context, publication, journalIssue, isJournalIssueOfPublicationRelationshipType).build(); diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java index 
305de076a2f1..3e36f77c68b9 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplPlaceTest.java @@ -9,11 +9,15 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.sql.SQLException; +import java.util.Arrays; import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; @@ -27,6 +31,7 @@ import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -39,6 +44,8 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() .getRelationshipTypeService(); + protected RelationshipMetadataService relationshipMetadataService = + ContentServiceFactory.getInstance().getRelationshipMetadataService(); protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); @@ -52,9 +59,33 @@ public class RelationshipServiceImplPlaceTest extends AbstractUnitTest { Item item; Item authorItem; + + Item author1; + Item author2; + Item author3; + Item author4; + Item author5; + Item 
author6; + Item publication1; + Item publication2; + Item publication3; + Item publication4; + Item publication5; + Item publication6; + Item project1; + Item project2; + Item project3; + Item project4; + Item project5; + Item project6; + RelationshipType isAuthorOfPublication; + RelationshipType isProjectOfPublication; + RelationshipType isProjectOfPerson; + EntityType publicationEntityType; - EntityType authorEntityType; + EntityType projectEntityType; + EntityType personEntityType; String authorQualifier = "author"; String contributorElement = "contributor"; @@ -84,12 +115,120 @@ public void init() { itemService.addMetadata(context, authorItem, "person", "familyName", null, null, "familyName"); itemService.addMetadata(context, authorItem, "person", "givenName", null, null, "firstName"); + WorkspaceItem wi; + + wi = workspaceItemService.create(context, col, false); + author1 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author1, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author1, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author1, "person", "givenName", null, null, "First"); + + wi = workspaceItemService.create(context, col, false); + author2 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author2, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author2, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author2, "person", "givenName", null, null, "Second"); + + wi = workspaceItemService.create(context, col, false); + author3 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author3, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author3, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author3, "person", "givenName", null, null, "Third"); + + wi = 
workspaceItemService.create(context, col, false); + author4 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author4, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author4, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author4, "person", "givenName", null, null, "Fourth"); + + wi = workspaceItemService.create(context, col, false); + author5 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author5, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author5, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author5, "person", "givenName", null, null, "Fifth"); + + wi = workspaceItemService.create(context, col, false); + author6 = installItemService.installItem(context, wi); + itemService.addMetadata(context, author6, "dspace", "entity", "type", null, "Person"); + itemService.addMetadata(context, author6, "person", "familyName", null, null, "Author"); + itemService.addMetadata(context, author6, "person", "givenName", null, null, "Sixth"); + + wi = workspaceItemService.create(context, col, false); + publication1 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication1, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication1, "dc", "title", null, null, "Publication 1"); + + wi = workspaceItemService.create(context, col, false); + publication2 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication2, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication2, "dc", "title", null, null, "Publication 2"); + + wi = workspaceItemService.create(context, col, false); + publication3 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication3, "dspace", "entity", "type", null, "Publication"); + 
itemService.addMetadata(context, publication3, "dc", "title", null, null, "Publication 3"); + + wi = workspaceItemService.create(context, col, false); + publication4 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication4, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication4, "dc", "title", null, null, "Publication 4"); + + wi = workspaceItemService.create(context, col, false); + publication5 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication5, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication5, "dc", "title", null, null, "Publication 5"); + + wi = workspaceItemService.create(context, col, false); + publication6 = installItemService.installItem(context, wi); + itemService.addMetadata(context, publication6, "dspace", "entity", "type", null, "Publication"); + itemService.addMetadata(context, publication6, "dc", "title", null, null, "Publication 6"); + + wi = workspaceItemService.create(context, col, false); + project1 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project1, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project1, "dc", "title", null, null, "Project 1"); + + wi = workspaceItemService.create(context, col, false); + project2 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project2, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project2, "dc", "title", null, null, "Project 2"); + + wi = workspaceItemService.create(context, col, false); + project3 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project3, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project3, "dc", "title", null, null, "Project 3"); + + wi = workspaceItemService.create(context, col, false); + project4 = 
installItemService.installItem(context, wi); + itemService.addMetadata(context, project4, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project4, "dc", "title", null, null, "Project 4"); + + wi = workspaceItemService.create(context, col, false); + project5 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project5, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project5, "dc", "title", null, null, "Project 5"); + + wi = workspaceItemService.create(context, col, false); + project6 = installItemService.installItem(context, wi); + itemService.addMetadata(context, project6, "dspace", "entity", "type", null, "Project"); + itemService.addMetadata(context, project6, "dc", "title", null, null, "Project 6"); + + publicationEntityType = entityTypeService.create(context, "Publication"); - authorEntityType = entityTypeService.create(context, "Person"); + projectEntityType = entityTypeService.create(context, "Project"); + personEntityType = entityTypeService.create(context, "Person"); isAuthorOfPublication = relationshipTypeService - .create(context, publicationEntityType, authorEntityType, + .create(context, publicationEntityType, personEntityType, "isAuthorOfPublication", "isPublicationOfAuthor", null, null, null, null); + isProjectOfPublication = relationshipTypeService + .create(context, publicationEntityType, projectEntityType, + "isProjectOfPublication", "isPublicationOfProject", + null, null, null, null); + isProjectOfPerson = relationshipTypeService + .create(context, personEntityType, projectEntityType, + "isProjectOfPerson", "isPersonOfProject", + null, null, null, null); context.restoreAuthSystemState(); } catch (AuthorizeException ex) { @@ -226,7 +365,7 @@ public void AddMetadataAndRelationshipWithSpecificPlaceTest() throws Exception { itemService.addMetadata(context, secondAuthorItem, "person", "familyName", null, null, "familyNameTwo"); 
itemService.addMetadata(context, secondAuthorItem, "person", "givenName", null, null, "firstNameTwo"); Relationship relationshipTwo = relationshipService - .create(context, item, secondAuthorItem, isAuthorOfPublication, 5, -1); + .create(context, item, secondAuthorItem, isAuthorOfPublication, 1, -1); context.restoreAuthSystemState(); @@ -234,16 +373,19 @@ public void AddMetadataAndRelationshipWithSpecificPlaceTest() throws Exception { list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "familyName, firstName", - "virtual::" + relationship.getID(), 2, list.get(2)); - assertThat(relationship.getLeftPlace(), equalTo(2)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); - assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 4, list.get(4)); assertMetadataValue(authorQualifier, contributorElement, dcSchema, "familyNameTwo, firstNameTwo", - "virtual::" + relationshipTwo.getID(), 5, list.get(5)); - assertThat(relationshipTwo.getLeftPlace(), equalTo(5)); + "virtual::" + relationshipTwo.getID(), 1, list.get(1)); + assertThat(relationshipTwo.getLeftPlace(), equalTo(1)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "familyName, firstName", + "virtual::" + relationship.getID(), 3, list.get(3)); + assertThat(relationship.getLeftPlace(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 4, list.get(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 
5, list.get(5)); } @@ -425,4 +567,2768 @@ private void assertMetadataValue(String authorQualifier, String contributorEleme } + /* RelationshipService#create */ + + @Test + public void createUseForPlaceRelationshipAppendingLeftNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another Author @ leftPlace 0. 
The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 0, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another Author @ leftPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another Author @ leftPlace 2. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createUseForPlaceRelationshipAppendingLeftWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author to the same Publication, appending to the end + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheStartWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, 
authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author @ leftPlace 0. All MDVs & relationships after it should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 0, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, Third", + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceInTheMiddleWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author @ leftPlace 2. 
All MDVs & relationships after it should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Third", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceInTheMiddleWithMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, author2, project2, isProjectOfPerson, -1, -1); + + // Add another Author @ leftPlace 2. 
All MDVs & relationships after it should get pushed by one place + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Third", + "Author, Second", + "MDV 2" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 0); + } + + @Test + public void createUseForPlaceRelationshipWithLeftPlaceAtTheEndWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add a dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // Add two Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // Add another dc.contributor.author MDV + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // Add another Author @ leftPlace 4. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, 4, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void createUseForPlaceRelationshipAppendingRightNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // Add another Publication @ rightPlace 0. 
The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // Add another Publication @ rightPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceInTheMiddleNoMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected 
+ Relationship ur2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Publication @ rightPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + } + + @Test + public void createUseForPlaceRelationshipWithRightPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // Add another Publication @ rightPlace 2. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipAppendingLeftTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceAtTheStartTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Project @ leftPlace 0. 
The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 0, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceInTheMiddleTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Project @ leftPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceInTheMiddleTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + + // Add another Project @ leftPlace 1. 
The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void createNonUseForPlaceRelationshipWithLeftPlaceAtTheEndTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + + // Add another Project @ leftPlace 2. This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, 2, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipAppendingRightTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + 
assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceAtTheStartTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 0. The existing relationships should get pushed one place forward + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceInTheMiddleTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 1. 
The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceInTheMiddleTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 1. The second relationship should get pushed by one place + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void createNonUseForPlaceRelationshipWithRightPlaceAtTheEndTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add two Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // Add another Author @ rightPlace 2. 
This should have the same effect as just appending it + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + /* RelationshipService#move */ + + @Test + public void moveUseForPlaceRelationshipToCurrentLeftPlaceNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r1, 0, null); + relationshipService.move(context, r2, 1, null); + relationshipService.move(context, r3, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r3, 0, null); + + 
context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void moveUseForPlaceRelationshipUpToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r1, r3)); + } + + @Test + public void moveUseForPlaceRelationshipDownToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, 
List.of(r1, r3, r2)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, -1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndOverlapNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToCurrentLeftPlaceWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + 
itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r1, 1, null); + relationshipService.move(context, r2, 2, null); + relationshipService.move(context, r3, 4, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheStartWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.move(context, r3, 0, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); 
+ assertLeftPlace(r1, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r3, r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, Third", + "MDV 1", + "Author, First", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void moveUseForPlaceRelationshipUpToLeftPlaceInTheMiddleWithTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=3 + relationshipService.move(context, r1, 3, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r1, 3); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r1, r3)); + } + + @Test + public void moveUseForPlaceRelationshipUpToLeftPlaceInTheMiddleWithTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = 
relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication1, project2, isProjectOfPublication, -1, -1); + + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=3 + relationshipService.move(context, r1, 3, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r1, 3); + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r1, r3)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + } + + @Test + public void moveUseForPlaceRelationshipDownToLeftPlaceInTheMiddleWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the last Author to leftPlace=2 + relationshipService.move(context, r3, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + 
assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void moveUseForPlaceRelationshipDownToLeftPlaceInTheMiddleWithMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, author2, project2, isProjectOfPerson, -1, -1); + + // Move the last Author to leftPlace=2 + relationshipService.move(context, r3, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertLeftPlace(r2, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 0); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + 
Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, -1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 3); + assertLeftPlace(r1, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToLeftPlaceAtTheEndOverlapWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, 4, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 3); + assertLeftPlace(r1, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToCurrentRightPlaceNoMetadataTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + + relationshipService.move(context, r1, null, 0); + relationshipService.move(context, r2, null, 1); + relationshipService.move(context, r3, null, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2, r3)); + } + + @Test + public void moveUseForPlaceRelationshipToRightPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + + relationshipService.move(context, r3, null, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r3, r1, r2)); + } + + @Test + public void moveUseForPlaceRelationshipUpToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, 
isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r1, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r1, r3)); + } + + @Test + public void moveUseForPlaceRelationshipDownToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3, r2)); + } + + @Test + public void moveUseForPlaceRelationshipToRightPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, 
isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, null, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRightPlace(r1, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveUseForPlaceRelationshipToRightPlaceAtTheEndOverlapNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, null, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRightPlace(r1, 2); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r3, r1)); + } + + @Test + public void moveNonUseForPlaceRelationshipToCurrentLeftPlaceNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the last Project to the front + relationshipService.move(context, r1, 0, null); + relationshipService.move(context, r2, 1, null); + relationshipService.move(context, r3, 2, null); + + 
context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void moveNonUseForPlaceRelationshipToLeftPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the last Project to the front + relationshipService.move(context, r3, 0, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r3, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipUpToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r1, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r1, r3)); + } + + @Test + public void 
moveNonUseForPlaceRelationshipDownToLeftPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, 1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipToLeftPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, -1, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3, r1)); + } + + @Test + public void moveNonUseForPlaceRelationshipToLeftPlaceAtTheEndOverlapNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, 
-1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, 2, null); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertLeftPlace(r1, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3, r1)); + } + + @Test + public void moveNonUseForPlaceRelationshipToCurrentRightPlaceNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + + relationshipService.move(context, r1, null, 0); + relationshipService.move(context, r2, null, 1); + relationshipService.move(context, r3, null, 2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void moveNonUseForPlaceRelationshipToRightPlaceAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + + 
relationshipService.move(context, r3, null, 0); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r3, 0); + assertRightPlace(r1, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r3, r1, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipUpToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r1, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r1, r3)); + } + + @Test + public void moveNonUseForPlaceRelationshipUpToRightPlaceInTheMiddleNoMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication2, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = 
relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, publication3, project1, isProjectOfPublication, -1, -1); + + // Move the first Author to leftPlace=1 + relationshipService.move(context, r1, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r1, 1); + assertRightPlace(r3, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r1, r3)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 1); + assertLeftPlace(ur3, 0); + assertRightPlace(ur3, 2); + } + + @Test + public void moveNonUseForPlaceRelationshipDownToRightPlaceInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + } + + @Test + public void moveNonUseForPlaceRelationshipDownToRightPlaceInTheMiddleNoMetadataTest_ignoreOtherRels( + ) throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); 
+ Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the last Author to leftPlace=1 + relationshipService.move(context, r3, null, 1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRightPlace(r2, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void moveNonUseForPlaceRelationshipToRightPlaceAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Move the first Author to the back + relationshipService.move(context, r1, null, -1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRightPlace(r1, 2); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3, r1)); + } + + /* RelationshipService#delete */ + + @Test + public void deleteUseForPlaceRelationshipFromLeftStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, 
isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Delete the first Author + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Delete the second Author + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Publication, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Delete the third Author + relationshipService.delete(context, r3); + + 
context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2)); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftStartWithMetadataNoCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r1, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftStartWithMetadataCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + 
Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r1, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + // NOTE: since R1 has been removed, but copied to left, this place remains at 2 (instead of 1) + assertLeftPlace(r2, 2); + // NOTE: since R1 has been removed, but copied to left, this place remains at 4 (instead of 3) + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(null, r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", // this is not longer a relationship + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataNoCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + } + + @Test + public void 
deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataNoCopyTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, 
dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + // NOTE: since R2 has been removed, but copied to left, this place remains at 4 (instead of 3) + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, null, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", // this is not longer a relationship + "MDV 2", + "Author, Third" + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftMiddleWithMetadataCopyTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication1, project2, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, 
isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r2, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + // NOTE: since R2 has been removed, but copied to left, this place remains at 4 (instead of 3) + assertLeftPlace(r3, 4); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, null, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", // this is not longer a relationship + "MDV 2", + "Author, Third" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + assertLeftPlace(ur3, 0); + assertRightPlace(ur3, 0); + } + + @Test + public void deleteUseForPlaceRelationshipFromLeftEndWithMetadataNoCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + + relationshipService.delete(context, r3, false, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", + "MDV 2" + )); + } + + @Test + public void 
deleteUseForPlaceRelationshipFromLeftEndWithMetadataCopyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Initialize MDVs and Relationships + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + relationshipService.delete(context, r3, true, false); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, Arrays.asList(r1, r2, null)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" // this is not longer a relationship + )); + } + + @Test + public void deleteUseForPlaceRelationshipFromRightStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Delete the first Publication + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(author1, 
isAuthorOfPublication, List.of(r2, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromRightMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Delete the second Publication + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3)); + } + + @Test + public void deleteUseForPlaceRelationshipFromRightEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Delete the third Publication + relationshipService.delete(context, r3); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromLeftStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, 
isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Delete the first Author + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromLeftMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Delete the second Author + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromLeftEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to the same Author, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Delete the third Author + relationshipService.delete(context, r3); + + context.restoreAuthSystemState(); + + // Check relationship order + assertLeftPlace(r1, 0); + 
assertLeftPlace(r2, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the first Publication + relationshipService.delete(context, r1); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the second Publication + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightMiddleNoMetadataTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = 
relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the second Publication + relationshipService.delete(context, r2); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + } + + @Test + public void deleteNonUseForPlaceRelationshipFromRightEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to the same Project, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Delete the third Publication + relationshipService.delete(context, r3); + + context.restoreAuthSystemState(); + + // Check relationship order + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = 
relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, appending to the end + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r1 to publication 2 + relationshipService.move(context, r1, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r2, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + + // Check relationship order for publication2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, appending to the end + Relationship r4 = relationshipService.create(context, 
publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r2 to publication 2 + relationshipService.move(context, r2, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); // should move down as the second Relationship was removed + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + + // Check relationship order for publication2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, appending to the end + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r3 to publication 2 + 
relationshipService.move(context, r3, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2)); + + // Check relationship order for publication2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheStartWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, 
publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r1 to publication 2 + relationshipService.move(context, r1, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r2, 1); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r2, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "MDV 1", + "Author, Second", + "MDV 2", + "Author, Third" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r1, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r1)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, First" + )); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipInTheMiddleWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, 
publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r2 to publication 2 + relationshipService.move(context, r2, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 3); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r2, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, Second" + )); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipInTheMiddleWithMetadataTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, 
authorQualifier, null, "MDV 1"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication1, project3, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, publication2, project2, isProjectOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur4 = relationshipService.create(context, publication2, project1, isProjectOfPublication, -1, -1); + + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r2 to publication 2 + relationshipService.move(context, r2, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 3); 
+ assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r3)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "MDV 2", + "Author, Third" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r2, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, Second" + )); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 1); + assertRightPlace(ur2, 0); + assertLeftPlace(ur3, 0); + assertRightPlace(ur3, 0); + assertLeftPlace(ur4, 1); + assertRightPlace(ur4, 1); + } + + @Test + public void changeLeftItemInUseForPlaceRelationshipAtTheEndWithMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to publication1, with regular MDVs in between + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 1"); + Relationship r2 = relationshipService.create(context, publication1, author2, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication1, dcSchema, contributorElement, authorQualifier, null, "MDV 2"); + Relationship r3 = relationshipService.create(context, publication1, author3, isAuthorOfPublication, -1, -1); + + // Add three Authors to publication2, with regular MDVs in between + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 3"); + Relationship r4 = 
relationshipService.create(context, publication2, author4, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication2, author5, isAuthorOfPublication, -1, -1); + itemService.addMetadata(context, publication2, dcSchema, contributorElement, authorQualifier, null, "MDV 4"); + Relationship r6 = relationshipService.create(context, publication2, author6, isAuthorOfPublication, -1, -1); + + // Move r3 to publication 2 + relationshipService.move(context, r3, publication2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for publication1 + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 2); + assertRelationMetadataOrder(publication1, isAuthorOfPublication, List.of(r1, r2)); + assertMetadataOrder(publication1, "dc.contributor.author", List.of( + "Author, First", + "MDV 1", + "Author, Second", + "MDV 2" + )); + + // Check relationship order for publication2 + assertLeftPlace(r4, 1); // previous Relationships should stay where they were + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 4); + assertLeftPlace(r3, 5); // moved Relationship should be appended to the end + assertRelationMetadataOrder(publication2, isAuthorOfPublication, List.of(r4, r5, r6, r3)); + assertMetadataOrder(publication2, "dc.contributor.author", List.of( + "MDV 3", + "Author, Fourth", + "Author, Fifth", + "MDV 4", + "Author, Sixth", + "Author, Third" + )); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipAtTheStartNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, 
appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, -1, -1); + + // Move r1 to author2 + relationshipService.move(context, r1, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r2, 0); // should both move down as the first Relationship was removed + assertRightPlace(r3, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r2, r3)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipInTheMiddleNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, 
-1, -1); + + // Move r2 to author2 + relationshipService.move(context, r2, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first Relationship was removed + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipInTheMiddleNoMetadataTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, author2, project1, isProjectOfPerson, 
-1, -1); + + // Move r2 to author2 + relationshipService.move(context, r2, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first Relationship was removed + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r3)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 1); + } + + @Test + public void changeRightItemInUseForPlaceRelationshipAtTheEndNoMetadataTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Publications to author1, appending to the end + Relationship r1 = relationshipService.create(context, publication1, author1, isAuthorOfPublication, -1, -1); + Relationship r2 = relationshipService.create(context, publication2, author1, isAuthorOfPublication, -1, -1); + Relationship r3 = relationshipService.create(context, publication3, author1, isAuthorOfPublication, -1, -1); + + // Add three Publications to author2, appending to the end + Relationship r4 = relationshipService.create(context, publication4, author2, isAuthorOfPublication, -1, -1); + Relationship r5 = relationshipService.create(context, publication5, author2, isAuthorOfPublication, -1, -1); + Relationship r6 = relationshipService.create(context, publication6, author2, isAuthorOfPublication, -1, -1); + + // Move r3 to author2 + relationshipService.move(context, r3, null, author2); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertRightPlace(r1, 0); + 
assertRightPlace(r2, 1); + assertRelationMetadataOrder(author1, isAuthorOfPublication, List.of(r1, r2)); + + // Check relationship order for author2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isAuthorOfPublication, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeLeftItemInNonUseForPlaceRelationshipAtTheStart() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Projects to author2, appending to the end + Relationship r4 = relationshipService.create(context, author2, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author2, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author2, project6, isProjectOfPerson, -1, -1); + + // Move r1 to author2 + relationshipService.move(context, r1, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertLeftPlace(r2, 0); // should both move down as the first Relationship was removed + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for author2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, 
isProjectOfPerson, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeLeftItemInNonUseNonForPlaceRelationshipInTheMiddle() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Projects to author2, appending to the end + Relationship r4 = relationshipService.create(context, author2, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author2, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author2, project6, isProjectOfPerson, -1, -1); + + // Move r2 to author2 + relationshipService.move(context, r2, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertLeftPlace(r1, 0); + assertLeftPlace(r3, 1); // should move down as the second Relationship was removed + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r3)); + + // Check relationship order for author2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isProjectOfPerson, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeLeftItemInNonUseForPlaceRelationshipAtTheEnd() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, 
isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Projects to author2, appending to the end + Relationship r4 = relationshipService.create(context, author2, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author2, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author2, project6, isProjectOfPerson, -1, -1); + + // Move r3 to author2 + relationshipService.move(context, r3, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2)); + + // Check relationship order for author2 + assertLeftPlace(r4, 0); // previous Relationships should stay where they were + assertLeftPlace(r5, 1); + assertLeftPlace(r6, 2); + assertLeftPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(author2, isProjectOfPerson, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeRightItemInUseNonForPlaceRelationshipAtTheStartTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project2, 
isProjectOfPerson, -1, -1); + + // Move r1 to project2 + relationshipService.move(context, r1, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r2, 0); // should both move down as the first Relationship was removed + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r1, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r1)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipInTheMiddleTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project2, isProjectOfPerson, -1, -1); + + // Move r2 to project2 + relationshipService.move(context, r2, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first Relationship was removed + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + + // Check relationship 
order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r2)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipInTheMiddleTest_ignoreOtherRels() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur1 = relationshipService.create(context, publication1, project1, isProjectOfPublication, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur2 = relationshipService.create(context, publication2, project1, isProjectOfPublication, -1, -1); + + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + + // NOTE: unrelated relationship => should not be affected + Relationship ur3 = relationshipService.create(context, author5, project3, isProjectOfPerson, -1, -1); + + Relationship r6 = relationshipService.create(context, author6, project2, isProjectOfPerson, -1, -1); + + // Move r2 to project2 + relationshipService.move(context, r2, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r1, 0); + assertRightPlace(r3, 1); // should move down as the first 
Relationship was removed + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r3)); + + // Check relationship order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + assertRightPlace(r6, 2); + assertRightPlace(r2, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r2)); + + // check unaffected relationships + assertLeftPlace(ur1, 0); + assertRightPlace(ur1, 0); + assertLeftPlace(ur2, 0); + assertRightPlace(ur2, 1); + assertLeftPlace(ur3, 1); + assertRightPlace(ur3, 0); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipAtTheEndTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + + // Add three Authors to project2, appending to the end + Relationship r4 = relationshipService.create(context, author4, project2, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project2, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project2, isProjectOfPerson, -1, -1); + + // Move r3 to project2 + relationshipService.move(context, r3, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 + assertRightPlace(r1, 0); + assertRightPlace(r2, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r2)); + + // Check relationship order for project2 + assertRightPlace(r4, 0); // previous Relationships should stay where they were + assertRightPlace(r5, 1); + 
assertRightPlace(r6, 2); + assertRightPlace(r3, 3); // moved Relationship should be appended to the end + assertRelationMetadataOrder(project2, isProjectOfPerson, List.of(r4, r5, r6, r3)); + } + + @Test + public void changeLeftItemInNonUseForPlaceRelationshipAtTheStartToSameItemNoChanges() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move r1 to author1 + relationshipService.move(context, r1, author1, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 -> should remain unchanged + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipAtTheStartToSameItemNoChanges() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Move r1 to project1 + relationshipService.move(context, r1, null, project1); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 -> should remain unchanged + assertLeftPlace(r1, 0); + assertLeftPlace(r2, 1); + assertLeftPlace(r3, 2); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r2, r3)); + } + + @Test + public void 
changeLeftItemInNonUseForPlaceRelationshipAtTheStartWithSiblingsInOldLeftItem() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Projects to author1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author1, project2, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author1, project3, isProjectOfPerson, -1, -1); + + // Add three Authors to project1, appending to the end + Relationship r4 = relationshipService.create(context, author4, project1, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author5, project1, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author6, project1, isProjectOfPerson, -1, -1); + + // Move r1 to author2 + relationshipService.move(context, r1, author2, null); + + context.restoreAuthSystemState(); + + // Check relationship order for author1 -> should shift down by one + assertLeftPlace(r2, 0); + assertLeftPlace(r3, 1); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for project 1 -> should remain unchanged + assertRightPlace(r1, 0); + assertRightPlace(r4, 1); + assertRightPlace(r5, 2); + assertRightPlace(r6, 3); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r1, r4, r5, r6)); + } + + @Test + public void changeRightItemInNonUseForPlaceRelationshipAtTheStartWithSiblingsInOldRightItem() throws Exception { + context.turnOffAuthorisationSystem(); + + // Add three Authors to project1, appending to the end + Relationship r1 = relationshipService.create(context, author1, project1, isProjectOfPerson, -1, -1); + Relationship r2 = relationshipService.create(context, author2, project1, isProjectOfPerson, -1, -1); + Relationship r3 = relationshipService.create(context, author3, project1, isProjectOfPerson, -1, -1); + 
+ // Add three Projects to author1, appending to the end + Relationship r4 = relationshipService.create(context, author1, project4, isProjectOfPerson, -1, -1); + Relationship r5 = relationshipService.create(context, author1, project5, isProjectOfPerson, -1, -1); + Relationship r6 = relationshipService.create(context, author1, project6, isProjectOfPerson, -1, -1); + + // Move r1 to project2 + relationshipService.move(context, r1, null, project2); + + context.restoreAuthSystemState(); + + // Check relationship order for project1 -> should shift down by one + assertRightPlace(r2, 0); + assertRightPlace(r3, 1); + assertRelationMetadataOrder(project1, isProjectOfPerson, List.of(r2, r3)); + + // Check relationship order for author1 -> should remain unchanged + assertLeftPlace(r1, 0); + assertLeftPlace(r4, 1); + assertLeftPlace(r5, 2); + assertLeftPlace(r6, 3); + assertRelationMetadataOrder(author1, isProjectOfPerson, List.of(r1, r4, r5, r6)); + } + + + private void assertLeftPlace(Relationship relationship, int leftPlace) { + assertEquals(leftPlace, relationship.getLeftPlace()); + } + + private void assertRightPlace(Relationship relationship, int rightPlace) { + assertEquals(rightPlace, relationship.getRightPlace()); + } + + + private void assertRelationMetadataOrder( + Item item, RelationshipType relationshipType, List relationships + ) { + String element = getRelationshipTypeStringForEntity(relationshipType, item); + List mdvs = itemService.getMetadata( + item, + MetadataSchemaEnum.RELATION.getName(), element, null, + Item.ANY + ); + + assertEquals( + "Metadata authorities should match relationship IDs", + relationships.stream() + .map(r -> { + if (r != null) { + return Constants.VIRTUAL_AUTHORITY_PREFIX + r.getID(); + } else { + return null; // To match relationships that have been deleted and copied to MDVs + } + }) + .collect(Collectors.toList()), + mdvs.stream() + .map(MetadataValue::getAuthority) + .collect(Collectors.toList()) + ); + } + + private void 
assertMetadataOrder( + Item item, String metadataField, List metadataValues + ) { + List mdvs = itemService.getMetadataByMetadataString(item, metadataField); + + assertEquals( + metadataValues, + mdvs.stream() + .map(MetadataValue::getValue) + .collect(Collectors.toList()) + ); + } + + private String getRelationshipTypeStringForEntity(RelationshipType relationshipType, Item item) { + String entityType = itemService.getEntityTypeLabel(item); + + if (StringUtils.equals(entityType, relationshipType.getLeftType().getLabel())) { + return relationshipType.getLeftwardType(); + } else if (StringUtils.equals(entityType, relationshipType.getRightType().getLabel())) { + return relationshipType.getRightwardType(); + } else { + throw new IllegalArgumentException( + entityType + " is not a valid entity for " + relationshipType.getLeftwardType() + ", must be either " + + relationshipType.getLeftType().getLabel() + " or " + relationshipType.getRightType().getLabel() + ); + } + } } diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java index 5d6197e49460..579e05b3deb2 100644 --- a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplTest.java @@ -24,12 +24,14 @@ import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.services.ConfigurationService; +import org.dspace.versioning.utils.RelationshipVersioningUtils; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; +import org.mockito.Spy; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) @@ -71,6 +73,9 @@ public class RelationshipServiceImplTest { @Mock private ConfigurationService configurationService; + @Spy + private RelationshipVersioningUtils 
relationshipVersioningUtils; + @Before public void init() { relationshipsList = new ArrayList<>(); @@ -112,9 +117,6 @@ public void testFindByItem() throws Exception { relationshipTest.add(getRelationship(bob, cindy, hasMother,1,0)); when(relationshipService.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest); - // Mock the state of objects utilized in findByItem() to meet the success criteria of the invocation - when(relationshipDAO.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest); - List results = relationshipService.findByItem(context, cindy); assertEquals("TestFindByItem 0", relationshipTest, results); for (int i = 0; i < relationshipTest.size(); i++) { @@ -122,32 +124,6 @@ public void testFindByItem() throws Exception { } } - @Test - public void testFindLeftPlaceByLeftItem() throws Exception { - // Declare objects utilized in unit test - Item item = mock(Item.class); - - // Mock DAO to return mocked left place as 0 - when(relationshipDAO.findNextLeftPlaceByLeftItem(context, item)).thenReturn(0); - - // The left place reported from out mocked item should match the DAO's report of the left place - assertEquals("TestFindLeftPlaceByLeftItem 0", relationshipDAO.findNextLeftPlaceByLeftItem(context, item), - relationshipService.findNextLeftPlaceByLeftItem(context, item)); - } - - @Test - public void testFindRightPlaceByRightItem() throws Exception { - // Declare objects utilized in unit test - Item item = mock(Item.class); - - // Mock lower level DAO to return mocked right place as 0 - when(relationshipDAO.findNextRightPlaceByRightItem(context, item)).thenReturn(0); - - // The right place reported from out mocked item should match the DAO's report of the right place - assertEquals("TestFindRightPlaceByRightItem 0", relationshipDAO.findNextRightPlaceByRightItem(context, item), - relationshipService.findNextRightPlaceByRightItem(context, item)); - } - @Test public void testFindByItemAndRelationshipType() throws Exception { // 
Declare objects utilized in unit test diff --git a/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java new file mode 100644 index 000000000000..1b6f23032d57 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/RelationshipServiceImplVersioningIT.java @@ -0,0 +1,1105 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.dao.RelationshipDAO; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.RelationshipService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class RelationshipServiceImplVersioningIT extends AbstractIntegrationTestWithDatabase { + + private RelationshipService relationshipService; + private RelationshipDAO relationshipDAO; + + protected Community community; + protected Collection collection; + protected EntityType publicationEntityType; + protected EntityType personEntityType; + protected RelationshipType relationshipType; + protected Item publication1; + protected Item publication2; + protected Item publication3; + protected Item person1; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + 
relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + relationshipDAO = DSpaceServicesFactory.getInstance().getServiceManager() + .getServicesByType(RelationshipDAO.class).get(0); + + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + + personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + + relationshipType = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, personEntityType, + "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + publication1 = ItemBuilder.createItem(context, collection) + .withTitle("publication1") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + publication2 = ItemBuilder.createItem(context, collection) + .withTitle("publication2") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + publication3 = ItemBuilder.createItem(context, collection) + .withTitle("publication3") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + person1 = ItemBuilder.createItem(context, collection) + .withTitle("person1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusDefault() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 3, 5, "left", "right" + ); + 
context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship2.getLatestVersionStatus()); + + // create method #2 + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create( + context, publication2, person1, relationshipType, 3, 5 + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship4.getLatestVersionStatus()); + + // create method #3 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication3); + inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + context.turnOffAuthorisationSystem(); + Relationship relationship5 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship5.getLatestVersionStatus()); + Relationship relationship6 = relationshipService.find(context, relationship5.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship6.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + relationshipService.delete(context, relationship5); + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusBoth() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 
3, 5, "left", "right", + Relationship.LatestVersionStatus.BOTH // set latest version status + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship2.getLatestVersionStatus()); + + // create method #2 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication2); + inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + inputRelationship.setLatestVersionStatus(Relationship.LatestVersionStatus.BOTH); // set latest version status + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.BOTH, relationship4.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusLeftOnly() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 3, 5, "left", "right", + Relationship.LatestVersionStatus.LEFT_ONLY // set latest version status + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + 
assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship2.getLatestVersionStatus()); + + // create method #2 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication2); + inputRelationship.setRightItem(person1); + inputRelationship.setRelationshipType(relationshipType); + inputRelationship.setLatestVersionStatus(Relationship.LatestVersionStatus.LEFT_ONLY); // set LVS + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.LEFT_ONLY, relationship4.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + context.restoreAuthSystemState(); + } + + @Test + public void testRelationshipLatestVersionStatusRightOnly() throws Exception { + // create method #1 + context.turnOffAuthorisationSystem(); + Relationship relationship1 = relationshipService.create( + context, publication1, person1, relationshipType, 3, 5, "left", "right", + Relationship.LatestVersionStatus.RIGHT_ONLY // set latest version status + ); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship1.getLatestVersionStatus()); + Relationship relationship2 = relationshipService.find(context, relationship1.getID()); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship2.getLatestVersionStatus()); + + // create method #2 + Relationship inputRelationship = new Relationship(); + inputRelationship.setLeftItem(publication2); + inputRelationship.setRightItem(person1); + 
inputRelationship.setRelationshipType(relationshipType); + inputRelationship.setLatestVersionStatus(Relationship.LatestVersionStatus.RIGHT_ONLY); // set LVS + context.turnOffAuthorisationSystem(); + Relationship relationship3 = relationshipService.create(context, inputRelationship); + context.restoreAuthSystemState(); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship3.getLatestVersionStatus()); + Relationship relationship4 = relationshipService.find(context, relationship3.getID()); + assertEquals(Relationship.LatestVersionStatus.RIGHT_ONLY, relationship4.getLatestVersionStatus()); + + // clean up + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship1); + relationshipService.delete(context, relationship3); + context.restoreAuthSystemState(); + } + + protected void assertRelationship(Relationship expectedRelationship, List relationships) { + assertNotNull(relationships); + assertEquals(1, relationships.size()); + assertEquals(expectedRelationship, relationships.get(0)); + } + + protected void assertNoRelationship(List relationships) { + assertNotNull(relationships); + assertEquals(0, relationships.size()); + } + + @Test + public void testExcludeNonLatestBoth() throws Exception { + context.turnOffAuthorisationSystem(); + Relationship relationship1 = RelationshipBuilder + .createRelationshipBuilder(context, publication1, person1, relationshipType) + .withLatestVersionStatus(Relationship.LatestVersionStatus.BOTH) + .build(); + context.restoreAuthSystemState(); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, true) + ); + + assertRelationship( + 
relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, true) + ); + + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, true)); + + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + 
relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1) + ); + assertRelationship( + relationship1, + 
relationshipService.findByItem(context, person1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + 
relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1) + ); + + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals(1, relationshipService.countByItem(context, publication1)); + assertEquals(1, relationshipService.countByItem(context, person1)); + + assertEquals(1, relationshipService.countByItem(context, publication1, false, false)); + assertEquals(1, 
relationshipService.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipService.countByItem(context, person1, false, false)); + assertEquals(1, relationshipService.countByItem(context, person1, false, true)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipService.countByItem(context, person1, true, false)); + assertEquals(1, relationshipService.countByItem(context, person1, true, true)); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true) + ); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, 
relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + } + + @Test + public void testExcludeNonLatestLeftOnly() throws Exception { + context.turnOffAuthorisationSystem(); + Relationship relationship1 = RelationshipBuilder + .createRelationshipBuilder(context, publication1, person1, relationshipType) + .withLatestVersionStatus(Relationship.LatestVersionStatus.LEFT_ONLY) + .build(); + context.restoreAuthSystemState(); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, publication1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, true) + ); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, true) + ); + + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, false)); + assertEquals(0, relationshipDAO.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, false)); + assertEquals(0, relationshipDAO.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, true)); + + 
assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, 
relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + + assertNoRelationship( + relationshipService.findByItem(context, publication1) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1) + ); + + assertNoRelationship( + relationshipService.findByItem(context, publication1, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipService.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, true) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1) + ); + assertRelationship( + 
relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1) + ); + + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, 
false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals(0, relationshipService.countByItem(context, publication1)); + assertEquals(1, relationshipService.countByItem(context, person1)); + + assertEquals(1, relationshipService.countByItem(context, publication1, false, false)); + assertEquals(0, relationshipService.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipService.countByItem(context, person1, false, false)); + assertEquals(1, relationshipService.countByItem(context, person1, false, true)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, false)); + assertEquals(0, relationshipService.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipService.countByItem(context, person1, true, false)); + assertEquals(1, relationshipService.countByItem(context, person1, true, true)); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true) + ); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, 
relationshipType, false, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + } + + @Test + public void testExcludeNonLatestRightOnly() throws Exception { + context.turnOffAuthorisationSystem(); + Relationship relationship1 = RelationshipBuilder + .createRelationshipBuilder(context, publication1, person1, relationshipType) + .withLatestVersionStatus(Relationship.LatestVersionStatus.RIGHT_ONLY) + .build(); + context.restoreAuthSystemState(); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, person1, false, true) + ); + + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItem(context, person1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipDAO.findByItem(context, 
person1, -1, -1, false, true) + ); + + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, false, false)); + assertEquals(0, relationshipDAO.countByItem(context, person1, false, true)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipDAO.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipDAO.countByItem(context, person1, true, false)); + assertEquals(0, relationshipDAO.countByItem(context, person1, true, true)); + + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + 
assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipDAO.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipDAO.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1) + ); + assertNoRelationship( + relationshipService.findByItem(context, person1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItem(context, person1, -1, -1, false) + ); + + assertRelationship( + relationship1, + relationshipService.findByItem(context, publication1, -1, -1, false, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, 
publication1, -1, -1, false, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItem(context, person1, -1, -1, false, false) + ); + assertNoRelationship( + relationshipService.findByItem(context, person1, -1, -1, false, true) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1) + ); + + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, -1, -1, true) + ); + + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1) + ); + + assertNoRelationship( + relationshipService + 
.findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, false, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, false) + ); + assertRelationship( + relationship1, + relationshipService + .findByItemAndRelationshipType(context, publication1, relationshipType, true, -1, -1, true) + ); + assertRelationship( + relationship1, + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, false, -1, -1, true) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, false) + ); + assertNoRelationship( + relationshipService.findByItemAndRelationshipType(context, person1, relationshipType, true, -1, -1, true) + ); + + assertEquals(1, relationshipService.countByItem(context, publication1)); + assertEquals(0, relationshipService.countByItem(context, person1)); + + assertEquals(1, relationshipService.countByItem(context, publication1, false, false)); + assertEquals(1, relationshipService.countByItem(context, publication1, false, true)); + assertEquals(1, relationshipService.countByItem(context, person1, false, false)); + assertEquals(0, relationshipService.countByItem(context, person1, false, true)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, false)); + assertEquals(1, relationshipService.countByItem(context, publication1, true, true)); + assertEquals(1, relationshipService.countByItem(context, person1, true, false)); + assertEquals(0, relationshipService.countByItem(context, person1, true, true)); + + assertEquals( + 0, 
relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true) + ); + + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, false, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, false) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, publication1, relationshipType, true, true) + ); + assertEquals( + 1, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, false, true) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, false) + ); + assertEquals( + 0, relationshipService.countByItemAndRelationshipType(context, person1, relationshipType, true, true) + ); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java b/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java index 92d2961a43e6..c5359b23f0fc 100644 --- a/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java +++ b/dspace-api/src/test/java/org/dspace/content/RightTiltedRelationshipMetadataServiceIT.java @@ -8,10 +8,13 @@ package org.dspace.content; import static 
org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import java.util.List; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; import org.dspace.builder.EntityTypeBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.RelationshipBuilder; @@ -29,12 +32,22 @@ public class RightTiltedRelationshipMetadataServiceIT extends RelationshipMetada @Override protected void initJournalVolumeIssue() throws Exception { context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection col = CollectionBuilder.createCollection(context, community) + .withEntityType("JournalIssue") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, community) + .withEntityType("JournalVolume") + .build(); + EntityType journalIssueEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalIssue").build(); EntityType publicationVolumeEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalVolume").build(); - leftItem = ItemBuilder.createItem(context, col).withEntityType("JournalIssue") + leftItem = ItemBuilder.createItem(context, col) .withPublicationIssueNumber("2").build(); - rightItem = ItemBuilder.createItem(context, col).withEntityType("JournalVolume") + rightItem = ItemBuilder.createItem(context, col2) .withPublicationVolumeNumber("30").build(); RelationshipType isIssueOfVolume = RelationshipTypeBuilder @@ -76,18 +89,27 @@ public void testGetJournalRelationshipMetadata() throws Exception { //request the virtual metadata of the journal volume List volumeRelList = relationshipMetadataService.getRelationshipMetadata(rightItem, true); - assertThat(volumeRelList.size(), equalTo(2)); - assertThat(volumeRelList.get(0).getValue(), equalTo("2")); - assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), 
equalTo("publicationissue")); - assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("issueNumber")); - assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); - assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + assertThat(volumeRelList.size(), equalTo(3)); - assertThat(volumeRelList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); - assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), + assertThat(volumeRelList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo(MetadataSchemaEnum.RELATION.getName())); - assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery")); + assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(volumeRelList.get(1).getValue(), equalTo("2")); + assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); + assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("issueNumber")); + assertThat(volumeRelList.get(1).getMetadataField().getQualifier(), equalTo(null)); assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); + + assertThat(volumeRelList.get(2).getValue(), equalTo(String.valueOf(leftItem.getID()))); + assertThat(volumeRelList.get(2).getMetadataField().getMetadataSchema().getName(), + equalTo(MetadataSchemaEnum.RELATION.getName())); + assertThat(volumeRelList.get(2).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); + assertThat(volumeRelList.get(2).getMetadataField().getQualifier(), 
nullValue()); + assertThat(volumeRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID())); } } diff --git a/dspace-api/src/test/java/org/dspace/content/SupervisedItemTest.java b/dspace-api/src/test/java/org/dspace/content/SupervisedItemTest.java deleted file mode 100644 index aece739f25af..000000000000 --- a/dspace-api/src/test/java/org/dspace/content/SupervisedItemTest.java +++ /dev/null @@ -1,200 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.content; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.List; -import java.util.UUID; - -import org.apache.logging.log4j.Logger; -import org.dspace.AbstractUnitTest; -import org.dspace.authorize.AuthorizeException; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.SupervisedItemService; -import org.dspace.content.service.WorkspaceItemService; -import org.dspace.core.Context; -import org.dspace.eperson.EPerson; -import org.dspace.eperson.Group; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.EPersonService; -import org.dspace.eperson.service.GroupService; -import org.dspace.eperson.service.SupervisorService; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -/** - * @author pvillega - */ -public class SupervisedItemTest extends AbstractUnitTest { - - /** - * log4j category - */ - private static final Logger log = 
org.apache.logging.log4j.LogManager.getLogger(SupervisedItemTest.class); - - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); - protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); - protected SupervisedItemService supervisedItemService = ContentServiceFactory.getInstance() - .getSupervisedItemService(); - protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - protected SupervisorService supervisorService = EPersonServiceFactory.getInstance().getSupervisorService(); - - protected UUID communityId; - protected UUID groupId; - protected int workspaceItemId; - - - /** - * This method will be run before every test as per @Before. It will - * initialize resources required for the tests. 
- * - * Other methods can be annotated with @Before here or in subclasses - * but no execution order is guaranteed - */ - @Before - @Override - public void init() { - super.init(); - try { - //we have to create a new community in the database - context.turnOffAuthorisationSystem(); - Community owningCommunity = communityService.create(null, context); - Collection collection = collectionService.create(context, owningCommunity); - WorkspaceItem si = workspaceItemService.create(context, collection, false); - Group gr = groupService.create(context); - EPerson currentUser = context.getCurrentUser(); - groupService.addMember(context, gr, currentUser); - groupService.update(context, gr); - - //set a supervisor as editor - supervisorService.add(context, gr, si, 1); - - communityId = owningCommunity.getID(); - workspaceItemId = si.getID(); - groupId = gr.getID(); - - //we need to commit the changes so we don't block the table for testing - context.restoreAuthSystemState(); - context.complete(); - context = new Context(); - context.setCurrentUser(currentUser); - } catch (AuthorizeException ex) { - log.error("Authorization Error in init", ex); - fail("Authorization Error in init: " + ex.getMessage()); - } catch (SQLException ex) { - log.error("SQL Error in init", ex); - fail("SQL Error in init"); - } - } - - /** - * This method will be run after every test as per @After. It will - * clean resources initialized by the @Before methods. 
- * - * Other methods can be annotated with @After here or in subclasses - * but no execution order is guaranteed - */ - @After - @Override - public void destroy() { - try { - context.turnOffAuthorisationSystem(); - communityService.delete(context, communityService.find(context, communityId)); - context.restoreAuthSystemState(); - } catch (SQLException | AuthorizeException | IOException ex) { - log.error("SQL Error in destroy", ex); - fail("SQL Error in destroy: " + ex.getMessage()); - } - super.destroy(); - } - - /** - * Test of getAll method, of class SupervisedItem. - */ - @Test - public void testGetAll() throws Exception { - List found = supervisedItemService.getAll(context); - assertThat("testGetAll 0", found, notNullValue()); - assertTrue("testGetAll 1", found.size() >= 1); - - boolean added = false; - for (WorkspaceItem sia : found) { - if (sia.getID() == workspaceItemId) { - added = true; - } - } - assertTrue("testGetAll 2", added); - } - - /** - * Test of getSupervisorGroups method, of class SupervisedItem. - */ - @Test - public void testGetSupervisorGroups_Context_int() throws Exception { - List found = workspaceItemService.find(context, workspaceItemId).getSupervisorGroups(); - assertThat("testGetSupervisorGroups_Context_int 0", found, notNullValue()); - assertTrue("testGetSupervisorGroups_Context_int 1", found.size() == 1); - assertThat("testGetSupervisorGroups_Context_int 2", found.get(0).getID(), equalTo(groupId)); - } - - /** - * Test of getSupervisorGroups method, of class SupervisedItem. 
- */ - @Test - public void testGetSupervisorGroups_0args() throws Exception { - List found = workspaceItemService.find(context, workspaceItemId).getSupervisorGroups(); - assertThat("testGetSupervisorGroups_0args 0", found, notNullValue()); - assertTrue("testGetSupervisorGroups_0args 1", found.size() == 1); - - boolean added = false; - for (Group g : found) { - if (g.getID().equals(groupId)) { - added = true; - } - } - assertTrue("testGetSupervisorGroups_0args 2", added); - } - - /** - * Test of findbyEPerson method, of class SupervisedItem. - */ - @Test - public void testFindbyEPerson() throws Exception { - context.turnOffAuthorisationSystem(); - List found = supervisedItemService.findbyEPerson(context, ePersonService.create(context)); - assertThat("testFindbyEPerson 0", found, notNullValue()); - assertTrue("testFindbyEPerson 1", found.size() == 0); - - found = supervisedItemService.findbyEPerson(context, context.getCurrentUser()); - assertThat("testFindbyEPerson 2", found, notNullValue()); - assertTrue("testFindbyEPerson 3", found.size() >= 1); - - boolean added = false; - for (WorkspaceItem sia : found) { - if (sia.getID() == workspaceItemId) { - added = true; - } - } - assertTrue("testFindbyEPerson 4", added); - - context.restoreAuthSystemState(); - } - -} diff --git a/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java new file mode 100644 index 000000000000..44653300e0de --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/VersioningWithRelationshipsIT.java @@ -0,0 +1,4203 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content; + +import static org.dspace.content.Relationship.LatestVersionStatus.BOTH; +import static 
org.dspace.content.Relationship.LatestVersionStatus.LEFT_ONLY; +import static org.dspace.content.Relationship.LatestVersionStatus.RIGHT_ONLY; +import static org.dspace.util.RelationshipVersioningTestUtils.isRel; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.instanceOf; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNotSame; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import org.apache.commons.lang3.function.FailableRunnable; +import org.apache.commons.lang3.function.FailableSupplier; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.InstallItemService; +import 
org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.content.virtual.Collected; +import org.dspace.content.virtual.VirtualMetadataConfiguration; +import org.dspace.content.virtual.VirtualMetadataPopulator; +import org.dspace.core.Constants; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.versioning.Version; +import org.dspace.versioning.factory.VersionServiceFactory; +import org.dspace.versioning.service.VersioningService; +import org.hamcrest.Matcher; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; + +public class VersioningWithRelationshipsIT extends AbstractIntegrationTestWithDatabase { + + private final RelationshipService relationshipService = + ContentServiceFactory.getInstance().getRelationshipService(); + private final VersioningService versioningService = + VersionServiceFactory.getInstance().getVersionService(); + private final WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + private final InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + private final ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + private final SolrSearchCore solrSearchCore = + DSpaceServicesFactory.getInstance().getServiceManager().getServicesByType(SolrSearchCore.class).get(0); + + protected Community community; + protected Collection collection; + protected EntityType publicationEntityType; + protected EntityType personEntityType; + protected EntityType projectEntityType; + protected EntityType orgUnitEntityType; + protected EntityType journalIssueEntityType; + protected EntityType 
journalVolumeEntityType; + protected RelationshipType isAuthorOfPublication; + protected RelationshipType isProjectOfPublication; + protected RelationshipType isOrgUnitOfPublication; + protected RelationshipType isMemberOfProject; + protected RelationshipType isMemberOfOrgUnit; + protected RelationshipType isIssueOfJournalVolume; + protected RelationshipType isProjectOfPerson; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .withName("community") + .build(); + + collection = CollectionBuilder.createCollection(context, community) + .withName("collection") + .build(); + + publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + + personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + + projectEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project") + .build(); + + orgUnitEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit") + .build(); + + journalIssueEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalIssue") + .build(); + + journalVolumeEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "JournalVolume") + .build(); + + isAuthorOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, personEntityType, + "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isProjectOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, projectEntityType, + "isProjectOfPublication", "isPublicationOfProject", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isOrgUnitOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, 
publicationEntityType, orgUnitEntityType, + "isOrgUnitOfPublication", "isPublicationOfOrgUnit", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isMemberOfProject = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, projectEntityType, personEntityType, + "isMemberOfProject", "isProjectOfMember", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isMemberOfOrgUnit = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, orgUnitEntityType, personEntityType, + "isMemberOfOrgUnit", "isOrgUnitOfMember", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isIssueOfJournalVolume = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, journalVolumeEntityType, journalIssueEntityType, + "isIssueOfJournalVolume", "isJournalVolumeOfIssue", + null, null, 1, 1 + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + isProjectOfPerson = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, personEntityType, projectEntityType, + "isProjectOfPerson", "isPersonOfProject", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + } + + protected Relationship getRelationship( + Item leftItem, RelationshipType relationshipType, Item rightItem + ) throws Exception { + List rels = relationshipService.findByRelationshipType(context, relationshipType).stream() + .filter(rel -> leftItem.getID().equals(rel.getLeftItem().getID())) + .filter(rel -> rightItem.getID().equals(rel.getRightItem().getID())) + .collect(Collectors.toList()); + + if (rels.size() == 0) { + return null; + } + + if (rels.size() == 1) { + return rels.get(0); + } + + // NOTE: this shouldn't be possible because of database constraints + throw new IllegalStateException(); + } + + @Test + public void test_createNewVersionOfItemOnLeftSideOfRelationships() throws Exception { + 
/////////////////////////////////////////////// + // create a publication with 3 relationships // + /////////////////////////////////////////////// + + Item person1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + Item project1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + Item orgUnit1 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 1") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, person1, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, project1, isProjectOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, orgUnit1, isOrgUnitOfPublication) + .build(); + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) 
+ ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + ///////////////////////////////////////////// + // create a new version of the publication // + ///////////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + 
relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + 
isRel(newPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + //////////////////////////////////////// + // do item install on new publication // + //////////////////////////////////////// + + WorkspaceItem newPublicationWSI = workspaceItemService.findByItem(context, newPublication); + installItemService.installItem(context, newPublicationWSI); + context.dispatchEvents(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, 
orgUnit1, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List 
relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + @Test + public void test_createNewVersionOfItemAndModifyRelationships() throws Exception { + /////////////////////////////////////////////// + // create a publication with 3 relationships // + /////////////////////////////////////////////// + + Item person1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + Item project1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + Item orgUnit1 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 1") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, person1, isAuthorOfPublication) + .build(); + + RelationshipBuilder + .createRelationshipBuilder(context, originalPublication, project1, isProjectOfPublication) + .build(); + + RelationshipBuilder + .createRelationshipBuilder(context, originalPublication, orgUnit1, isOrgUnitOfPublication) + .build(); + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + 
isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + ///////////////////////////////////////////// + // create a new version of the publication // + ///////////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + ///////////////////////////////////////////// + // modify relationships on new publication // + ///////////////////////////////////////////// + + Item person2 = ItemBuilder.createItem(context, collection) + .withTitle("person 2") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + Item orgUnit2 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 2") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + // on new item, remove relationship with project 1 + List newProjectRels = relationshipService + .findByItemAndRelationshipType(context, newPublication, isProjectOfPublication); + assertEquals(1, newProjectRels.size()); + relationshipService.delete(context, newProjectRels.get(0)); + + // on new item remove relationship with org unit 1 + List newOrgUnitRels = relationshipService + .findByItemAndRelationshipType(context, newPublication, isOrgUnitOfPublication); 
+ assertEquals(1, newOrgUnitRels.size()); + relationshipService.delete(context, newOrgUnitRels.get(0)); + + RelationshipBuilder.createRelationshipBuilder(context, newPublication, person2, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, newPublication, orgUnit2, isOrgUnitOfPublication) + .build(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, true), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, true), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isOrgUnitOfPublication, 
orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, false), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, BOTH, 0, 0) + )) + ); + + assertThat( + 
relationshipService.findByItem(context, orgUnit2, -1, -1, false, false), + containsInAnyOrder(List.of( + // NOTE: BOTH because new relationship + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + // NOTE: BOTH because new relationship + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////// + // do item install on new publication // + //////////////////////////////////////// + + WorkspaceItem newPublicationWSI = workspaceItemService.findByItem(context, newPublication); + installItemService.installItem(context, newPublicationWSI); + context.dispatchEvents(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, 
project1, -1, -1, false, true), + empty() + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + empty() + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 7 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0), + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isAuthorOfPublication, person1, RIGHT_ONLY, 0, 0), + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, person2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isProjectOfPublication, project1, RIGHT_ONLY, 0, 0) + )) + ); + + 
assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(originalPublication, isOrgUnitOfPublication, orgUnit1, RIGHT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(newPublication, isAuthorOfPublication, person1, BOTH, 0, 0), + isRel(newPublication, isAuthorOfPublication, person2, BOTH, 1, 0), + isRel(newPublication, isOrgUnitOfPublication, orgUnit2, BOTH, 0, 0) + )) + ); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + @Test + public void test_createNewVersionOfItemOnRightSideOfRelationships() throws Exception { + ////////////////////////////////////////// + // create a person with 3 relationships // + ////////////////////////////////////////// + + Item publication1 = ItemBuilder.createItem(context, collection) + .withTitle("publication 1") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + Item project1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + Item orgUnit1 = ItemBuilder.createItem(context, collection) + .withTitle("org unit 1") + .withMetadata("dspace", "entity", "type", orgUnitEntityType.getLabel()) + .build(); + + Item originalPerson = ItemBuilder.createItem(context, collection) + .withTitle("original person") + .withMetadata("dspace", "entity", "type", 
personEntityType.getLabel()) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, publication1, originalPerson, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, project1, originalPerson, isMemberOfProject) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, orgUnit1, originalPerson, isMemberOfOrgUnit) + .build(); + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////// + // create a new version of the person // + //////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPerson); + Item newPerson = newVersion.getItem(); + assertNotSame(originalPerson, newPerson); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + 
/////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, newPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, 
false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, BOTH, 0, 0), + isRel(publication1, isAuthorOfPublication, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, newPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, LEFT_ONLY, 0, 0) + )) + ); + + /////////////////////////////////// + // do item install on new person // + /////////////////////////////////// + + WorkspaceItem newPersonWSI = workspaceItemService.findByItem(context, newPerson); + installItemService.installItem(context, newPersonWSI); + context.dispatchEvents(); + + /////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = true) // + /////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, 
publication1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, orgUnit1, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, true), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + //////////////////////////////////////////////////////////////////////// + // verify the relationships of all 5 items (excludeNonLatest = false) // + //////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, originalPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, LEFT_ONLY, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, publication1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, originalPerson, LEFT_ONLY, 0, 0), + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, project1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(project1, isMemberOfProject, originalPerson, LEFT_ONLY, 0, 0), + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + 
relationshipService.findByItem(context, orgUnit1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(orgUnit1, isMemberOfOrgUnit, originalPerson, LEFT_ONLY, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + assertThat( + relationshipService.findByItem(context, newPerson, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1, isAuthorOfPublication, newPerson, BOTH, 0, 0), + isRel(project1, isMemberOfProject, newPerson, BOTH, 0, 0), + isRel(orgUnit1, isMemberOfOrgUnit, newPerson, BOTH, 0, 0) + )) + ); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + @Test + public void test_createNewVersionOfItemAndVerifyMetadataOrder() throws Exception { + ///////////////////////////////////////// + // create a publication with 6 authors // + ///////////////////////////////////////// + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + // author 1 (plain metadata) + itemService.addMetadata(context, originalPublication, "dc", "contributor", "author", null, "author 1 (plain)"); + + // author 2 (virtual) + Item author2 = ItemBuilder.createItem(context, collection) + .withTitle("author 2 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("2 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author2, isAuthorOfPublication) + .build(); + + // author 3 (virtual) + Item author3 = ItemBuilder.createItem(context, collection) + .withTitle("author 3 (item)") + 
.withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("3 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author3, isAuthorOfPublication) + .build(); + + // author 4 (virtual) + Item author4 = ItemBuilder.createItem(context, collection) + .withTitle("author 4 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("4 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author4, isAuthorOfPublication) + .build(); + + // author 5 (virtual) + Item author5 = ItemBuilder.createItem(context, collection) + .withTitle("author 5 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("5 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author5, isAuthorOfPublication) + .build(); + + // author 6 (plain metadata) + itemService.addMetadata(context, originalPublication, "dc", "contributor", "author", null, "author 6 (plain)"); + + // author 7 (virtual) + Item author7 = ItemBuilder.createItem(context, collection) + .withTitle("author 7 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("7 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author7, isAuthorOfPublication) + .build(); + + // author 8 (plain metadata) + itemService.addMetadata(context, originalPublication, "dc", "contributor", "author", null, "author 8 (plain)"); + + // author 9 (virtual) + Item author9 = ItemBuilder.createItem(context, collection) + .withTitle("author 9 (item)") + .withMetadata("dspace", "entity", "type", 
personEntityType.getLabel()) + .withPersonIdentifierFirstName("9 (item)") + .withPersonIdentifierLastName("author") + .build(); + RelationshipBuilder.createRelationshipBuilder(context, originalPublication, author9, isAuthorOfPublication) + .build(); + + //////////////////////////////// + // test dc.contributor.author // + //////////////////////////////// + + List oldMdvs = itemService.getMetadata( + originalPublication, "dc", "contributor", "author", Item.ANY + ); + assertEquals(9, oldMdvs.size()); + + assertFalse(oldMdvs.get(0) instanceof RelationshipMetadataValue); + assertEquals("author 1 (plain)", oldMdvs.get(0).getValue()); + assertEquals(0, oldMdvs.get(0).getPlace()); + + assertTrue(oldMdvs.get(1) instanceof RelationshipMetadataValue); + assertEquals("author, 2 (item)", oldMdvs.get(1).getValue()); + assertEquals(1, oldMdvs.get(1).getPlace()); + + assertTrue(oldMdvs.get(2) instanceof RelationshipMetadataValue); + assertEquals("author, 3 (item)", oldMdvs.get(2).getValue()); + assertEquals(2, oldMdvs.get(2).getPlace()); + + assertTrue(oldMdvs.get(3) instanceof RelationshipMetadataValue); + assertEquals("author, 4 (item)", oldMdvs.get(3).getValue()); + assertEquals(3, oldMdvs.get(3).getPlace()); + + assertTrue(oldMdvs.get(4) instanceof RelationshipMetadataValue); + assertEquals("author, 5 (item)", oldMdvs.get(4).getValue()); + assertEquals(4, oldMdvs.get(4).getPlace()); + + assertFalse(oldMdvs.get(5) instanceof RelationshipMetadataValue); + assertEquals("author 6 (plain)", oldMdvs.get(5).getValue()); + assertEquals(5, oldMdvs.get(5).getPlace()); + + assertTrue(oldMdvs.get(6) instanceof RelationshipMetadataValue); + assertEquals("author, 7 (item)", oldMdvs.get(6).getValue()); + assertEquals(6, oldMdvs.get(6).getPlace()); + + assertFalse(oldMdvs.get(7) instanceof RelationshipMetadataValue); + assertEquals("author 8 (plain)", oldMdvs.get(7).getValue()); + assertEquals(7, oldMdvs.get(7).getPlace()); + + assertTrue(oldMdvs.get(8) instanceof RelationshipMetadataValue); 
+ assertEquals("author, 9 (item)", oldMdvs.get(8).getValue()); + assertEquals(8, oldMdvs.get(8).getPlace()); + + ///////////////////////////////////////////// + // test relationship isAuthorOfPublication // + ///////////////////////////////////////////// + + List oldRelationships = relationshipService.findByItem(context, originalPublication); + assertEquals(6, oldRelationships.size()); + + assertEquals(originalPublication, oldRelationships.get(0).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(0).getRelationshipType()); + assertEquals(author2, oldRelationships.get(0).getRightItem()); + assertEquals(1, oldRelationships.get(0).getLeftPlace()); + assertEquals(0, oldRelationships.get(0).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(1).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(1).getRelationshipType()); + assertEquals(author3, oldRelationships.get(1).getRightItem()); + assertEquals(2, oldRelationships.get(1).getLeftPlace()); + assertEquals(0, oldRelationships.get(1).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(2).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(2).getRelationshipType()); + assertEquals(author4, oldRelationships.get(2).getRightItem()); + assertEquals(3, oldRelationships.get(2).getLeftPlace()); + assertEquals(0, oldRelationships.get(2).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(3).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(3).getRelationshipType()); + assertEquals(author5, oldRelationships.get(3).getRightItem()); + assertEquals(4, oldRelationships.get(3).getLeftPlace()); + assertEquals(0, oldRelationships.get(3).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(4).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(4).getRelationshipType()); + assertEquals(author7, 
oldRelationships.get(4).getRightItem()); + assertEquals(6, oldRelationships.get(4).getLeftPlace()); + assertEquals(0, oldRelationships.get(4).getRightPlace()); + + assertEquals(originalPublication, oldRelationships.get(5).getLeftItem()); + assertEquals(isAuthorOfPublication, oldRelationships.get(5).getRelationshipType()); + assertEquals(author9, oldRelationships.get(5).getRightItem()); + assertEquals(8, oldRelationships.get(5).getLeftPlace()); + assertEquals(0, oldRelationships.get(5).getRightPlace()); + + /////////////////////////////////////// + // create new version of publication // + /////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + //////////////////////////////// + // test dc.contributor.author // + //////////////////////////////// + + List newMdvs = itemService.getMetadata( + newPublication, "dc", "contributor", "author", Item.ANY + ); + assertEquals(9, newMdvs.size()); + + assertFalse(newMdvs.get(0) instanceof RelationshipMetadataValue); + assertEquals("author 1 (plain)", newMdvs.get(0).getValue()); + assertEquals(0, newMdvs.get(0).getPlace()); + + assertTrue(newMdvs.get(1) instanceof RelationshipMetadataValue); + assertEquals("author, 2 (item)", newMdvs.get(1).getValue()); + assertEquals(1, newMdvs.get(1).getPlace()); + + assertTrue(newMdvs.get(2) instanceof RelationshipMetadataValue); + assertEquals("author, 3 (item)", newMdvs.get(2).getValue()); + assertEquals(2, newMdvs.get(2).getPlace()); + + assertTrue(newMdvs.get(3) instanceof RelationshipMetadataValue); + assertEquals("author, 4 (item)", newMdvs.get(3).getValue()); + assertEquals(3, newMdvs.get(3).getPlace()); + + assertTrue(newMdvs.get(4) instanceof RelationshipMetadataValue); + assertEquals("author, 5 (item)", newMdvs.get(4).getValue()); + assertEquals(4, newMdvs.get(4).getPlace()); + + assertFalse(newMdvs.get(5) 
instanceof RelationshipMetadataValue); + assertEquals("author 6 (plain)", newMdvs.get(5).getValue()); + assertEquals(5, newMdvs.get(5).getPlace()); + + assertTrue(newMdvs.get(6) instanceof RelationshipMetadataValue); + assertEquals("author, 7 (item)", newMdvs.get(6).getValue()); + assertEquals(6, newMdvs.get(6).getPlace()); + + assertFalse(newMdvs.get(7) instanceof RelationshipMetadataValue); + assertEquals("author 8 (plain)", newMdvs.get(7).getValue()); + assertEquals(7, newMdvs.get(7).getPlace()); + + assertTrue(newMdvs.get(8) instanceof RelationshipMetadataValue); + assertEquals("author, 9 (item)", newMdvs.get(8).getValue()); + assertEquals(8, newMdvs.get(8).getPlace()); + + ///////////////////////////////////////////// + // test relationship isAuthorOfPublication // + ///////////////////////////////////////////// + + List newRelationships = relationshipService.findByItem(context, newPublication); + assertEquals(6, newRelationships.size()); + + assertEquals(newPublication, newRelationships.get(0).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(0).getRelationshipType()); + assertEquals(author2, newRelationships.get(0).getRightItem()); + assertEquals(1, newRelationships.get(0).getLeftPlace()); + assertEquals(0, newRelationships.get(0).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(1).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(1).getRelationshipType()); + assertEquals(author3, newRelationships.get(1).getRightItem()); + assertEquals(2, newRelationships.get(1).getLeftPlace()); + assertEquals(0, newRelationships.get(1).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(2).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(2).getRelationshipType()); + assertEquals(author4, newRelationships.get(2).getRightItem()); + assertEquals(3, newRelationships.get(2).getLeftPlace()); + assertEquals(0, newRelationships.get(2).getRightPlace()); + + 
assertEquals(newPublication, newRelationships.get(3).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(3).getRelationshipType()); + assertEquals(author5, newRelationships.get(3).getRightItem()); + assertEquals(4, newRelationships.get(3).getLeftPlace()); + assertEquals(0, newRelationships.get(3).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(4).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(4).getRelationshipType()); + assertEquals(author7, newRelationships.get(4).getRightItem()); + assertEquals(6, newRelationships.get(4).getLeftPlace()); + assertEquals(0, newRelationships.get(4).getRightPlace()); + + assertEquals(newPublication, newRelationships.get(5).getLeftItem()); + assertEquals(isAuthorOfPublication, newRelationships.get(5).getRelationshipType()); + assertEquals(author9, newRelationships.get(5).getRightItem()); + assertEquals(8, newRelationships.get(5).getLeftPlace()); + assertEquals(0, newRelationships.get(5).getRightPlace()); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : relationships) { + relationshipService.delete(context, relationship); + } + } + + /** + * This test will + * - create a publication with 10 projects + * - Remove, move, add projects + * - Verify the order remains correct + * @throws Exception + */ + @Test + public void test_createNewVersionOfItemWithAddRemoveMove() throws Exception { + /////////////////////////////////////////// + // create a publication with 10 projects // + /////////////////////////////////////////// + + Item originalPublication = ItemBuilder.createItem(context, collection) + .withTitle("original publication") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + List projects = new ArrayList<>(); + + for (int i = 
0; i < 10; i++) { + Item project = ItemBuilder.createItem(context, collection) + .withTitle("project " + i) + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + projects.add(project); + + RelationshipBuilder + .createRelationshipBuilder(context, originalPublication, project, isProjectOfPublication) + .build(); + } + + AtomicInteger counterOriginalPublication = new AtomicInteger(); + List> listOriginalPublication = projects.stream().map( + project -> isRel(originalPublication, isProjectOfPublication, project, BOTH, + counterOriginalPublication.getAndIncrement(), 0) + ).collect(Collectors.toCollection(ArrayList::new)); + + ///////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = true) // + ///////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(listOriginalPublication) + ); + + ////////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = false) // + ////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(listOriginalPublication) + ); + + ///////////////////////////////////////////// + // create a new version of the publication // + ///////////////////////////////////////////// + + Version newVersion = versioningService.createNewVersion(context, originalPublication); + Item newPublication = newVersion.getItem(); + assertNotSame(originalPublication, newPublication); + + verifyProjectsMatch(originalPublication, projects, newPublication, projects, false);// + + ///////////////////////////////////////////// + // modify relationships on new publication // + ///////////////////////////////////////////// + + List newProjects 
= new ArrayList<>(projects); + assertEquals(newProjects.size(), 10); + + removeProject(newPublication, 5, newProjects); + + assertEquals(projects.size(), 10); + assertEquals(newProjects.size(), 9); + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + Item project6 = newProjects.get(6); + moveProject(newPublication, 6, 2, newProjects); + assertEquals(newProjects.size(), 9); + assertEquals(newProjects.get(2), project6); + assertNotEquals(projects.get(2), project6); + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + Item project1 = newProjects.get(1); + moveProject(newPublication, 1, 5, newProjects); + assertEquals(newProjects.size(), 9); + assertEquals(newProjects.get(5), project1); + assertNotEquals(projects.get(5), project1); + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + Item project = ItemBuilder.createItem(context, collection) + .withTitle("project 10") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + newProjects.add(4, project); + + RelationshipBuilder + .createRelationshipBuilder(context, newPublication, project, isProjectOfPublication, 4, -1) + .build(); + + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, false); + + //////////////////////////////////////// + // do item install on new publication // + //////////////////////////////////////// + + WorkspaceItem newPublicationWSI = workspaceItemService.findByItem(context, newPublication); + installItemService.installItem(context, newPublicationWSI); + context.dispatchEvents(); + + verifyProjectsMatch(originalPublication, projects, newPublication, newProjects, true); + + ////////////// + // clean up // + ////////////// + + // need to manually delete all relationships to avoid SQL constraint violation exception + List relationships = relationshipService.findAll(context); + for (Relationship relationship : 
relationships) { + relationshipService.delete(context, relationship); + } + } + + protected void removeProject(Item newPublication, int place, List newProjects) + throws SQLException, AuthorizeException { + List projectRels = relationshipService + .findByItemAndRelationshipType(context, newProjects.get(place), isProjectOfPublication, -1, -1, false) + .stream() + .filter( + relationship -> relationship.getLeftItem().equals(newPublication) + ) + .collect(Collectors.toCollection(ArrayList::new)); + assertEquals(1, projectRels.size()); + relationshipService.delete(context, projectRels.get(0)); + newProjects.remove(newProjects.get(place)); + } + + protected void moveProject(Item newPublication, int oldPlace, int newPlace, List newProjects) + throws SQLException, AuthorizeException { + Item project = newProjects.get(oldPlace); + List projectRels = relationshipService + .findByItemAndRelationshipType(context, project, isProjectOfPublication, -1, -1, false) + .stream() + .filter( + relationship -> relationship.getLeftItem().equals(newPublication) + ) + .collect(Collectors.toCollection(ArrayList::new)); + assertEquals(1, projectRels.size()); + relationshipService.move(context, projectRels.get(0), newPlace, null); + newProjects.remove(project); + newProjects.add(newPlace, project); + } + + protected void verifyProjectsMatch(Item originalPublication, List originalProjects, + Item newPublication, List newProjects, boolean newPublicationArchived) + throws SQLException { + + ///////////////////////////////////////////////////////// + // verify that the relationships were properly created // + ///////////////////////////////////////////////////////// + + AtomicInteger counterOriginalPublication = new AtomicInteger(); + List> listOriginalPublication = originalProjects.stream().map( + project -> isRel(originalPublication, isProjectOfPublication, project, + newPublicationArchived ? 
RIGHT_ONLY : BOTH, + counterOriginalPublication.getAndIncrement(), 0) + ).collect(Collectors.toCollection(ArrayList::new)); + + AtomicInteger counterNewPublication = new AtomicInteger(); + List> listNewPublication = newProjects.stream().map( + project -> isRel(newPublication, isProjectOfPublication, project, + newPublicationArchived || !originalProjects.contains(project) ? + BOTH : RIGHT_ONLY, + counterNewPublication.getAndIncrement(), 0) + ).collect(Collectors.toCollection(ArrayList::new)); + + ///////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = true) // + ///////////////////////////////////////////////////////////////////// + + assertEquals( + relationshipService.countByItem(context, originalPublication, false, true), + originalProjects.size() + ); + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, true), + containsInAnyOrder(listOriginalPublication) + ); + + assertEquals( + relationshipService.countByItem(context, newPublication, false, true), + newProjects.size() + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, true), + containsInAnyOrder(listNewPublication) + ); + + ////////////////////////////////////////////////////////////////////// + // verify the relationships of all items (excludeNonLatest = false) // + ////////////////////////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, originalPublication, -1, -1, false, false), + containsInAnyOrder(listOriginalPublication) + ); + + assertThat( + relationshipService.findByItem(context, newPublication, -1, -1, false, false), + containsInAnyOrder(listNewPublication) + ); + } + + /** + * NOTE: If Spring bean classes would be created with the new keyword, nothing would be autowired. 
+ */ + protected T createBean(Class beanClass) throws Exception { + AutowireCapableBeanFactory factory = DSpaceServicesFactory.getInstance().getServiceManager() + .getApplicationContext().getAutowireCapableBeanFactory(); + + T bean = beanClass.getDeclaredConstructor().newInstance(); + + factory.autowireBean(bean); + + return bean; + } + + /** + * Run the given callback with a virtual metadata config that's different from virtual-metadata.xml, + * and clean up after the callback has terminated. + * @param configModifier lambda that generates the temporary virtual metadata config. + * @param callback the callback that will be executed with the temporary virtual metadata config. + */ + protected void runWithVirtualMetadataConfig( + FailableSupplier>, Exception> configModifier, + FailableRunnable callback + ) throws Exception { + VirtualMetadataPopulator virtualMetadataPopulator = DSpaceServicesFactory.getInstance() + .getServiceManager().getServicesByType(VirtualMetadataPopulator.class).get(0); + + // keep reference to old config + Map> oldConfig = virtualMetadataPopulator.getMap(); + + try { + // set new config + Map> newConfig = configModifier.get(); + virtualMetadataPopulator.setMap(newConfig); + + // run the callback + callback.run(); + } finally { + // reset handlers + virtualMetadataPopulator.setMap(oldConfig); + } + } + + @Test + public void test_placeRecalculationAfterDelete() throws Exception { + // NOTE: this test uses relationship isIssueOfJournalVolume, because it adds virtual metadata + // on both sides of the relationship + + ///////////////////////////////////////// + // properly configure virtual metadata // + ///////////////////////////////////////// + + ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + + // virtual metadata field publicationissue.issueNumber needs to be used in place calculations + Collected issueVmd = serviceManager.getServiceByName("journalIssue_number", Collected.class); + 
assertNotNull(issueVmd); + boolean ogIssueVmdUseForPlace = issueVmd.getUseForPlace(); + issueVmd.setUseForPlace(true); + + ////////////////// + // create items // + ////////////////// + + // journal volume 1.1 + Item v1_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal volume 1") + .withMetadata("dspace", "entity", "type", journalVolumeEntityType.getLabel()) + .withMetadata("publicationvolume", "volumeNumber", null, "volume nr 3 (rel)") + .build(); + + // journal issue 1.1 + Item i1_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 1") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 1 (rel)") + .build(); + + // journal issue 3.1 + Item i3_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 3") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 3 (rel)") + .build(); + + // journal issue 5.1 + Item i5_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 5") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 5 (rel)") + .build(); + + ////////////////////////////////////////////// + // create relationships and metadata values // + ////////////////////////////////////////////// + + // relationship - volume 1 & issue 1 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i1_1, isIssueOfJournalVolume).build(); + + // metadata - volume 1 & issue 2 + itemService.addMetadata(context, v1_1, "publicationissue", "issueNumber", null, null, "issue nr 2 (plain)"); + + // relationship - volume 1 & issue 3 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i3_1, isIssueOfJournalVolume).build(); + + // metadata - volume 1 & issue 4 + itemService.addMetadata(context, v1_1, 
"publicationissue", "issueNumber", null, null, "issue nr 4 (plain)"); + + // relationship - volume 1 & issue 5 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i5_1, isIssueOfJournalVolume).build(); + + // metadata - volume 1 & issue 6 + itemService.addMetadata(context, v1_1, "publicationissue", "issueNumber", null, null, "issue nr 6 (plain)"); + + // SUMMARY + // + // volume 3 + // - pos 0: issue 1 (rel) + // - pos 1: issue 2 (plain) + // - pos 2: issue 3 (rel) + // - pos 3: issue 4 (plain) + // - pos 4: issue 5 (rel) + // - pos 5: issue 6 (plain) + + ///////////////////////////////// + // initial - verify volume 3.1 // + ///////////////////////////////// + + List mdvs1 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertFalse(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + assertTrue(mdvs1.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs1.get(2).getValue()); + assertEquals(2, mdvs1.get(2).getPlace()); + + assertFalse(mdvs1.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs1.get(3).getValue()); + assertEquals(3, mdvs1.get(3).getPlace()); + + assertTrue(mdvs1.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs1.get(4).getValue()); + assertEquals(4, mdvs1.get(4).getPlace()); + + assertFalse(mdvs1.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs1.get(5).getValue()); + assertEquals(5, mdvs1.get(5).getPlace()); + + ///////////////////////////////////// + // create new version - volume 1.2 // + ///////////////////////////////////// + + Item v1_2 = 
versioningService.createNewVersion(context, v1_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, v1_2)); + context.commit(); + + //////////////////////////////////// + // create new version - issue 3.2 // + //////////////////////////////////// + + Item i3_2 = versioningService.createNewVersion(context, i3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, i3_2)); + context.commit(); + + //////////////////////////////////////////////// + // after version creation - verify volume 1.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs4 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertFalse(mdvs4.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + assertTrue(mdvs4.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs4.get(2).getValue()); + assertEquals(2, mdvs4.get(2).getPlace()); + + assertFalse(mdvs4.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs4.get(3).getValue()); + assertEquals(3, mdvs4.get(3).getPlace()); + + assertTrue(mdvs4.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs4.get(4).getValue()); + assertEquals(4, mdvs4.get(4).getPlace()); + + assertFalse(mdvs4.get(5) instanceof 
RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs4.get(5).getValue()); + assertEquals(5, mdvs4.get(5).getPlace()); + + //////////////////////////////////////////////// + // after version creation - verify volume 1.2 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 4, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_2, BOTH, 2, 0) + )) + ); + + List mdvs7 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertFalse(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + assertTrue(mdvs7.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs7.get(2).getValue()); + assertEquals(2, mdvs7.get(2).getPlace()); + + assertFalse(mdvs7.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs7.get(3).getValue()); + assertEquals(3, mdvs7.get(3).getPlace()); + + assertTrue(mdvs7.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs7.get(4).getValue()); + assertEquals(4, mdvs7.get(4).getPlace()); + + assertFalse(mdvs7.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs7.get(5).getValue()); + assertEquals(5, mdvs7.get(5).getPlace()); + + /////////////////////////////////////////////////////////// + // remove relationship - volume 1.2 & issue 3.2 // + // since an issue needs a relationship, delete the 
issue // + /////////////////////////////////////////////////////////// + + Relationship rel1 = getRelationship(v1_2, isIssueOfJournalVolume, i3_2); + assertNotNull(rel1); + + itemService.delete(context, context.reloadEntity(i3_2)); + + context.commit(); + + //////////////////////////////////// + // after remove 1 - cache busting // + //////////////////////////////////// + + v1_2.setMetadataModified(); + v1_2 = context.reloadEntity(v1_2); + + //////////////////////////////////////// + // after remove 1 - verify volume 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs9 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertFalse(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + assertTrue(mdvs9.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs9.get(2).getValue()); + assertEquals(2, mdvs9.get(2).getPlace()); + + assertFalse(mdvs9.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs9.get(3).getValue()); + assertEquals(3, mdvs9.get(3).getPlace()); + + assertTrue(mdvs9.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs9.get(4).getValue()); + assertEquals(4, mdvs9.get(4).getPlace()); + + assertFalse(mdvs9.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", 
mdvs9.get(5).getValue()); + assertEquals(5, mdvs9.get(5).getPlace()); + + //////////////////////////////////////// + // after remove 1 - verify volume 1.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + // NOTE: left place was reduced by one + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 3, 0) + )) + ); + + List mdvs12 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs12.size()); + + assertTrue(mdvs12.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs12.get(0).getValue()); + assertEquals(0, mdvs12.get(0).getPlace()); + + assertFalse(mdvs12.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs12.get(1).getValue()); + assertEquals(1, mdvs12.get(1).getPlace()); + + assertFalse(mdvs12.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs12.get(2).getValue()); + assertEquals(2, mdvs12.get(2).getPlace()); + + assertTrue(mdvs12.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs12.get(3).getValue()); + assertEquals(3, mdvs12.get(3).getPlace()); + + assertFalse(mdvs12.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs12.get(4).getValue()); + assertEquals(4, mdvs12.get(4).getPlace()); + + //////////////////////////////////////// + // remove metadata value - volume 1.2 // + //////////////////////////////////////// + + MetadataValue removeMdv1 = mdvs12.get(2); + + // let's make sure we have the metadata value that we intended to remove + assertFalse(removeMdv1 instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", removeMdv1.getValue()); + assertEquals(2, removeMdv1.getPlace()); + 
assertEquals(v1_2, removeMdv1.getDSpaceObject()); + + itemService.removeMetadataValues(context, v1_2, List.of(removeMdv1)); + // NOTE: after removal, update is required to do place recalculation, among other things + itemService.update(context, v1_2); + context.commit(); + + //////////////////////////////////////// + // after remove 2 - verify volume 1.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs14 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(6, mdvs14.size()); + + assertTrue(mdvs14.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs14.get(0).getValue()); + assertEquals(0, mdvs14.get(0).getPlace()); + + assertFalse(mdvs14.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs14.get(1).getValue()); + assertEquals(1, mdvs14.get(1).getPlace()); + + assertTrue(mdvs14.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs14.get(2).getValue()); + assertEquals(2, mdvs14.get(2).getPlace()); + + assertFalse(mdvs14.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (plain)", mdvs14.get(3).getValue()); + assertEquals(3, mdvs14.get(3).getPlace()); + + assertTrue(mdvs14.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs14.get(4).getValue()); + assertEquals(4, mdvs14.get(4).getPlace()); + + assertFalse(mdvs14.get(5) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs14.get(5).getValue()); + assertEquals(5, mdvs14.get(5).getPlace()); + + //////////////////////////////////////// + // after remove 2 - verify volume 
1.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + // NOTE: left place was reduced by one (from 3 to 2) + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 2, 0) + )) + ); + + List mdvs17 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(4, mdvs17.size()); + + assertTrue(mdvs17.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs17.get(0).getValue()); + assertEquals(0, mdvs17.get(0).getPlace()); + + assertFalse(mdvs17.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (plain)", mdvs17.get(1).getValue()); + assertEquals(1, mdvs17.get(1).getPlace()); + + assertTrue(mdvs17.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs17.get(2).getValue()); + assertEquals(2, mdvs17.get(2).getPlace()); + + assertFalse(mdvs17.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 6 (plain)", mdvs17.get(3).getValue()); + assertEquals(3, mdvs17.get(3).getPlace()); + + ///////////////////////////////////////////// + // delete volume first for min cardinality // + ///////////////////////////////////////////// + + itemService.delete(context, context.reloadEntity(v1_1)); + itemService.delete(context, context.reloadEntity(v1_2)); + + ///////////////////////////// + // clean up config changes // + ///////////////////////////// + + issueVmd.setUseForPlace(ogIssueVmdUseForPlace); + } + + @Test + public void test_placeRecalculationAfterDelete_complex() throws Exception { + runWithVirtualMetadataConfig( + () -> { + // config summary: + // on the Project items, metadata field dc.contributor.author will appear with the Authors' titles + // on the Person items, metadata field dc.relation will appear with the 
Projects' titles + + Collected dcRelation = createBean(Collected.class); + dcRelation.setFields(List.of("dc.title")); + dcRelation.setUseForPlace(true); + + Collected dcContributorAuthor = createBean(Collected.class); + dcContributorAuthor.setFields(List.of("dc.title")); + dcContributorAuthor.setUseForPlace(true); + + return Map.of( + "isProjectOfPerson", new HashMap<>(Map.of( + "dc.relation", dcRelation + )), + "isPersonOfProject", new HashMap<>(Map.of( + "dc.contributor.author", dcContributorAuthor + )) + ); + }, + () -> { + ////////////////// + // create items // + ////////////////// + + // person 1.1 + Item pe1_1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + // person 3.1 + Item pe3_1 = ItemBuilder.createItem(context, collection) + .withTitle("person 3 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + // person 5.1 + Item pe5_1 = ItemBuilder.createItem(context, collection) + .withTitle("person 5 (item)") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .build(); + + // project 1.1 + Item pr1_1 = ItemBuilder.createItem(context, collection) + .withTitle("project 1 (item)") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + // project 3.1 + Item pr3_1 = ItemBuilder.createItem(context, collection) + .withTitle("project 3 (item)") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + // project 5.1 + Item pr5_1 = ItemBuilder.createItem(context, collection) + .withTitle("project 5 (item)") + .withMetadata("dspace", "entity", "type", projectEntityType.getLabel()) + .build(); + + ////////////////////////////////////////////// + // create relationships and metadata values // + ////////////////////////////////////////////// + + // relationship - person 3 & project 1 + 
RelationshipBuilder.createRelationshipBuilder(context, pe3_1, pr1_1, isProjectOfPerson) + .build(); + + // metadata - person 3 & project 2 + itemService.addMetadata(context, pe3_1, "dc", "relation", null, null, "project 2 (mdv)"); + + // relationship - person 1 & project 3 + RelationshipBuilder.createRelationshipBuilder(context, pe1_1, pr3_1, isProjectOfPerson) + .build(); + + // metadata - person 2 & project 3 + itemService.addMetadata(context, pr3_1, "dc", "contributor", "author", null, "person 2 (mdv)"); + + // relationship - person 3 & project 3 + RelationshipBuilder.createRelationshipBuilder(context, pe3_1, pr3_1, isProjectOfPerson) + .build(); + + // metadata - person 4 & project 3 + itemService.addMetadata(context, pr3_1, "dc", "contributor", "author", null, "person 4 (mdv)"); + + // relationship - person 5 & project 3 + RelationshipBuilder.createRelationshipBuilder(context, pe5_1, pr3_1, isProjectOfPerson) + .build(); + + // metadata - person 6 & project 3 + itemService.addMetadata(context, pr3_1, "dc", "contributor", "author", null, "person 6 (mdv)"); + + // metadata - person 7 & project 5 + itemService.addMetadata(context, pr5_1, "dc", "contributor", "author", null, "person 7 (mdv)"); + + // relationship - person 5 & project 5 + RelationshipBuilder.createRelationshipBuilder(context, pe5_1, pr5_1, isProjectOfPerson) + .build(); + + // metadata - person 3 & project 4 + itemService.addMetadata(context, pe3_1, "dc", "relation", null, null, "project 4 (mdv)"); + + // relationship - person 3 & project 5 + RelationshipBuilder.createRelationshipBuilder(context, pe3_1, pr5_1, isProjectOfPerson) + .build(); + + // metadata - person 3 & project 6 + itemService.addMetadata(context, pe3_1, "dc", "relation", null, null, "project 6 (mdv)"); + + // SUMMARY + // + // person 3 + // - pos 0: project 1 (item) + // - pos 1: project 2 (mdv) + // - pos 2: project 3 (item) [A] + // - pos 3: project 4 (mdv) + // - pos 4: project 5 (item) [B] + // - pos 5: project 6 (mdv) + // + 
// project 3 + // - pos 0: person 1 (item) + // - pos 1: person 2 (mdv) + // - pos 2: person 3 (item) [A] + // - pos 3: person 4 (mdv) + // - pos 4: person 5 (item) + // - pos 5: person 6 (mdv) + // + // project 5 + // - pos 0: person 7 (mdv) + // - pos 1: person 5 (item) + // - pos 2: person 3 (item) [B] + + ///////////////////////////////// + // initial - verify person 3.1 // + ///////////////////////////////// + + List mdvs1 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertFalse(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + assertTrue(mdvs1.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs1.get(2).getValue()); + assertEquals(2, mdvs1.get(2).getPlace()); + + assertFalse(mdvs1.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs1.get(3).getValue()); + assertEquals(3, mdvs1.get(3).getPlace()); + + assertTrue(mdvs1.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs1.get(4).getValue()); + assertEquals(4, mdvs1.get(4).getPlace()); + + assertFalse(mdvs1.get(5) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs1.get(5).getValue()); + assertEquals(5, mdvs1.get(5).getPlace()); + + ////////////////////////////////// + // initial - verify project 3.1 // + ////////////////////////////////// + + List mdvs2 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs2.size()); + + assertTrue(mdvs2.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs2.get(0).getValue()); + assertEquals(0, mdvs2.get(0).getPlace()); + + 
assertFalse(mdvs2.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs2.get(1).getValue()); + assertEquals(1, mdvs2.get(1).getPlace()); + + assertTrue(mdvs2.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs2.get(2).getValue()); + assertEquals(2, mdvs2.get(2).getPlace()); + + assertFalse(mdvs2.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs2.get(3).getValue()); + assertEquals(3, mdvs2.get(3).getPlace()); + + assertTrue(mdvs2.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs2.get(4).getValue()); + assertEquals(4, mdvs2.get(4).getPlace()); + + assertFalse(mdvs2.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs2.get(5).getValue()); + assertEquals(5, mdvs2.get(5).getPlace()); + + ////////////////////////////////// + // initial - verify project 5.1 // + ////////////////////////////////// + + List mdvs3 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs3.size()); + + assertFalse(mdvs3.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs3.get(0).getValue()); + assertEquals(0, mdvs3.get(0).getPlace()); + + assertTrue(mdvs3.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs3.get(1).getValue()); + assertEquals(1, mdvs3.get(1).getPlace()); + + assertTrue(mdvs3.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs3.get(2).getValue()); + assertEquals(2, mdvs3.get(2).getPlace()); + + ///////////////////////////////////// + // create new version - person 3.2 // + ///////////////////////////////////// + + Item pe3_2 = versioningService.createNewVersion(context, pe3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, pe3_2)); + context.commit(); + + ////////////////////////////////////// + // create new version - project 3.2 
// + ////////////////////////////////////// + + Item pr3_2 = versioningService.createNewVersion(context, pr3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, pr3_2)); + context.commit(); + + //////////////////////////////////////////////// + // after version creation - verify person 3.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_1, isProjectOfPerson, pr1_1, RIGHT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2) + )) + ); + + List mdvs4 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertFalse(mdvs4.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + assertTrue(mdvs4.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs4.get(2).getValue()); + assertEquals(2, mdvs4.get(2).getPlace()); + + assertFalse(mdvs4.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs4.get(3).getValue()); + assertEquals(3, mdvs4.get(3).getPlace()); + + assertTrue(mdvs4.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs4.get(4).getValue()); + assertEquals(4, mdvs4.get(4).getPlace()); + + assertFalse(mdvs4.get(5) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs4.get(5).getValue()); + assertEquals(5, mdvs4.get(5).getPlace()); + + ///////////////////////////////////////////////// + // after version creation - verify project 3.1 // + 
///////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe5_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 4), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2) + )) + ); + + List mdvs5 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs5.size()); + + assertTrue(mdvs5.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs5.get(0).getValue()); + assertEquals(0, mdvs5.get(0).getPlace()); + + assertFalse(mdvs5.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs5.get(1).getValue()); + assertEquals(1, mdvs5.get(1).getPlace()); + + assertTrue(mdvs5.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs5.get(2).getValue()); + assertEquals(2, mdvs5.get(2).getPlace()); + + assertFalse(mdvs5.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs5.get(3).getValue()); + assertEquals(3, mdvs5.get(3).getPlace()); + + assertTrue(mdvs5.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs5.get(4).getValue()); + assertEquals(4, mdvs5.get(4).getPlace()); + + assertFalse(mdvs5.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs5.get(5).getValue()); + assertEquals(5, mdvs5.get(5).getPlace()); + + ///////////////////////////////////////////////// + // after version creation - verify project 5.1 // + ///////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr5_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe5_1, isProjectOfPerson, pr5_1, BOTH, 1, 1), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2), + isRel(pe3_2, isProjectOfPerson, pr5_1, 
BOTH, 4, 2) + )) + ); + + List mdvs6 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs6.size()); + + assertFalse(mdvs6.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs6.get(0).getValue()); + assertEquals(0, mdvs6.get(0).getPlace()); + + assertTrue(mdvs6.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs6.get(1).getValue()); + assertEquals(1, mdvs6.get(1).getPlace()); + + assertTrue(mdvs6.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs6.get(2).getValue()); + assertEquals(2, mdvs6.get(2).getPlace()); + + //////////////////////////////////////////////// + // after version creation - verify person 3.2 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_2, isProjectOfPerson, pr1_1, BOTH, 0, 0), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr3_2, BOTH, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 4, 2) + )) + ); + + List mdvs7 = itemService.getMetadata( + pe3_2, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertFalse(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + assertTrue(mdvs7.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs7.get(2).getValue()); + assertEquals(2, mdvs7.get(2).getPlace()); + + assertFalse(mdvs7.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs7.get(3).getValue()); + assertEquals(3, mdvs7.get(3).getPlace()); + + 
assertTrue(mdvs7.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs7.get(4).getValue()); + assertEquals(4, mdvs7.get(4).getPlace()); + + assertFalse(mdvs7.get(5) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs7.get(5).getValue()); + assertEquals(5, mdvs7.get(5).getPlace()); + + ///////////////////////////////////////////////// + // after version creation - verify project 3.2 // + ///////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_2, BOTH, 0, 0), + isRel(pe5_1, isProjectOfPerson, pr3_2, BOTH, 0, 4), + isRel(pe3_2, isProjectOfPerson, pr3_2, BOTH, 2, 2) + )) + ); + + List mdvs8 = itemService.getMetadata( + pr3_2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs8.size()); + + assertTrue(mdvs8.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs8.get(0).getValue()); + assertEquals(0, mdvs8.get(0).getPlace()); + + assertFalse(mdvs8.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs8.get(1).getValue()); + assertEquals(1, mdvs8.get(1).getPlace()); + + assertTrue(mdvs8.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs8.get(2).getValue()); + assertEquals(2, mdvs8.get(2).getPlace()); + + assertFalse(mdvs8.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs8.get(3).getValue()); + assertEquals(3, mdvs8.get(3).getPlace()); + + assertTrue(mdvs8.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs8.get(4).getValue()); + assertEquals(4, mdvs8.get(4).getPlace()); + + assertFalse(mdvs8.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs8.get(5).getValue()); + assertEquals(5, mdvs8.get(5).getPlace()); + + //////////////////////////////////////////////////// 
+ // remove relationship - person 3.2 & project 3.2 // + //////////////////////////////////////////////////// + + Relationship rel1 = getRelationship(pe3_2, isProjectOfPerson, pr3_2); + assertNotNull(rel1); + + relationshipService.delete(context, rel1, false, false); + context.commit(); + + //////////////////////////////////// + // after remove 1 - cache busting // + //////////////////////////////////// + + pe3_2.setMetadataModified(); + pe3_2 = context.reloadEntity(pe3_2); + + pr3_2.setMetadataModified(); + pr3_2 = context.reloadEntity(pr3_2); + + //////////////////////////////////////// + // after remove 1 - verify person 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_1, isProjectOfPerson, pr1_1, RIGHT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2) + )) + ); + + List mdvs9 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertFalse(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + assertTrue(mdvs9.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs9.get(2).getValue()); + assertEquals(2, mdvs9.get(2).getPlace()); + + assertFalse(mdvs9.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs9.get(3).getValue()); + assertEquals(3, mdvs9.get(3).getPlace()); + + assertTrue(mdvs9.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs9.get(4).getValue()); + assertEquals(4, mdvs9.get(4).getPlace()); + + assertFalse(mdvs9.get(5) 
instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs9.get(5).getValue()); + assertEquals(5, mdvs9.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 1 - verify project 3.1 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + isRel(pe5_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 4) + )) + ); + + List mdvs10 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs10.size()); + + assertTrue(mdvs10.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs10.get(0).getValue()); + assertEquals(0, mdvs10.get(0).getPlace()); + + assertFalse(mdvs10.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs10.get(1).getValue()); + assertEquals(1, mdvs10.get(1).getPlace()); + + assertTrue(mdvs10.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs10.get(2).getValue()); + assertEquals(2, mdvs10.get(2).getPlace()); + + assertFalse(mdvs10.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs10.get(3).getValue()); + assertEquals(3, mdvs10.get(3).getPlace()); + + assertTrue(mdvs10.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs10.get(4).getValue()); + assertEquals(4, mdvs10.get(4).getPlace()); + + assertFalse(mdvs10.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs10.get(5).getValue()); + assertEquals(5, mdvs10.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 1 - verify project 5.1 // + ///////////////////////////////////////// + + assertThat( + 
relationshipService.findByItem(context, pr5_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe5_1, isProjectOfPerson, pr5_1, BOTH, 1, 1), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2), + // NOTE: left place was reduced by one + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 3, 2) + )) + ); + + List mdvs11 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs11.size()); + + assertFalse(mdvs11.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs11.get(0).getValue()); + assertEquals(0, mdvs11.get(0).getPlace()); + + assertTrue(mdvs11.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs11.get(1).getValue()); + assertEquals(1, mdvs11.get(1).getPlace()); + + assertTrue(mdvs11.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs11.get(2).getValue()); + assertEquals(2, mdvs11.get(2).getPlace()); + + //////////////////////////////////////// + // after remove 1 - verify person 3.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_2, isProjectOfPerson, pr1_1, BOTH, 0, 0), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + // NOTE: left place was reduced by one (from 4 to 3) + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 3, 2) + )) + ); + + List mdvs12 = itemService.getMetadata( + pe3_2, "dc", "relation", null, Item.ANY + ); + assertEquals(5, mdvs12.size()); + + assertTrue(mdvs12.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs12.get(0).getValue()); + assertEquals(0, mdvs12.get(0).getPlace()); + + assertFalse(mdvs12.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs12.get(1).getValue()); + assertEquals(1, mdvs12.get(1).getPlace()); + + assertFalse(mdvs12.get(2) instanceof RelationshipMetadataValue); + 
assertEquals("project 4 (mdv)", mdvs12.get(2).getValue()); + assertEquals(2, mdvs12.get(2).getPlace()); + + assertTrue(mdvs12.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs12.get(3).getValue()); + assertEquals(3, mdvs12.get(3).getPlace()); + + assertFalse(mdvs12.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs12.get(4).getValue()); + assertEquals(4, mdvs12.get(4).getPlace()); + + ///////////////////////////////////////// + // after remove 1 - verify project 3.2 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_2, BOTH, 0, 0), + // NOTE: right place was reduced by one (from 4 to 3) + isRel(pe5_1, isProjectOfPerson, pr3_2, BOTH, 0, 3) + )) + ); + + List mdvs13 = itemService.getMetadata( + pr3_2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(5, mdvs13.size()); + + assertTrue(mdvs13.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs13.get(0).getValue()); + assertEquals(0, mdvs13.get(0).getPlace()); + + assertFalse(mdvs13.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs13.get(1).getValue()); + assertEquals(1, mdvs13.get(1).getPlace()); + + assertFalse(mdvs13.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs13.get(2).getValue()); + assertEquals(2, mdvs13.get(2).getPlace()); + + assertTrue(mdvs13.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs13.get(3).getValue()); + assertEquals(3, mdvs13.get(3).getPlace()); + + assertFalse(mdvs13.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs13.get(4).getValue()); + assertEquals(4, mdvs13.get(4).getPlace()); + + //////////////////////////////////////// + // remove metadata value - person 3.2 // + 
//////////////////////////////////////// + + MetadataValue removeMdv1 = mdvs12.get(2); + + // let's make sure we have the metadata value that we intended to remove + assertFalse(removeMdv1 instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", removeMdv1.getValue()); + assertEquals(2, removeMdv1.getPlace()); + assertEquals(pe3_2, removeMdv1.getDSpaceObject()); + + itemService.removeMetadataValues(context, pe3_2, List.of(removeMdv1)); + itemService.update(context, pe3_2); + context.commit(); + + //////////////////////////////////////// + // after remove 2 - verify person 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_1, isProjectOfPerson, pr1_1, RIGHT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2) + )) + ); + + List mdvs14 = itemService.getMetadata( + pe3_1, "dc", "relation", null, Item.ANY + ); + assertEquals(6, mdvs14.size()); + + assertTrue(mdvs14.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs14.get(0).getValue()); + assertEquals(0, mdvs14.get(0).getPlace()); + + assertFalse(mdvs14.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs14.get(1).getValue()); + assertEquals(1, mdvs14.get(1).getPlace()); + + assertTrue(mdvs14.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 3 (item)", mdvs14.get(2).getValue()); + assertEquals(2, mdvs14.get(2).getPlace()); + + assertFalse(mdvs14.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 4 (mdv)", mdvs14.get(3).getValue()); + assertEquals(3, mdvs14.get(3).getPlace()); + + assertTrue(mdvs14.get(4) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", mdvs14.get(4).getValue()); + assertEquals(4, mdvs14.get(4).getPlace()); + + assertFalse(mdvs14.get(5) instanceof 
RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs14.get(5).getValue()); + assertEquals(5, mdvs14.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 2 - verify project 3.1 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 0), + isRel(pe3_1, isProjectOfPerson, pr3_1, RIGHT_ONLY, 2, 2), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + isRel(pe5_1, isProjectOfPerson, pr3_1, LEFT_ONLY, 0, 4) + )) + ); + + List mdvs15 = itemService.getMetadata( + pr3_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(6, mdvs15.size()); + + assertTrue(mdvs15.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs15.get(0).getValue()); + assertEquals(0, mdvs15.get(0).getPlace()); + + assertFalse(mdvs15.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs15.get(1).getValue()); + assertEquals(1, mdvs15.get(1).getPlace()); + + assertTrue(mdvs15.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs15.get(2).getValue()); + assertEquals(2, mdvs15.get(2).getPlace()); + + assertFalse(mdvs15.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs15.get(3).getValue()); + assertEquals(3, mdvs15.get(3).getPlace()); + + assertTrue(mdvs15.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs15.get(4).getValue()); + assertEquals(4, mdvs15.get(4).getPlace()); + + assertFalse(mdvs15.get(5) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs15.get(5).getValue()); + assertEquals(5, mdvs15.get(5).getPlace()); + + ///////////////////////////////////////// + // after remove 2 - verify project 5.1 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, 
pr5_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe5_1, isProjectOfPerson, pr5_1, BOTH, 1, 1), + isRel(pe3_1, isProjectOfPerson, pr5_1, RIGHT_ONLY, 4, 2), + // NOTE: left place was reduced by one + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 2, 2) + )) + ); + + List mdvs16 = itemService.getMetadata( + pr5_1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(3, mdvs16.size()); + + assertFalse(mdvs16.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 7 (mdv)", mdvs16.get(0).getValue()); + assertEquals(0, mdvs16.get(0).getPlace()); + + assertTrue(mdvs16.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs16.get(1).getValue()); + assertEquals(1, mdvs16.get(1).getPlace()); + + assertTrue(mdvs16.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 3 (item)", mdvs16.get(2).getValue()); + assertEquals(2, mdvs16.get(2).getPlace()); + + //////////////////////////////////////// + // after remove 2 - verify person 3.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pe3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe3_2, isProjectOfPerson, pr1_1, BOTH, 0, 0), + isRel(pe3_2, isProjectOfPerson, pr3_1, LEFT_ONLY, 2, 2), + // NOTE: left place was reduced by one (from 3 to 2) + isRel(pe3_2, isProjectOfPerson, pr5_1, BOTH, 2, 2) + )) + ); + + List mdvs17 = itemService.getMetadata( + pe3_2, "dc", "relation", null, Item.ANY + ); + assertEquals(4, mdvs17.size()); + + assertTrue(mdvs17.get(0) instanceof RelationshipMetadataValue); + assertEquals("project 1 (item)", mdvs17.get(0).getValue()); + assertEquals(0, mdvs17.get(0).getPlace()); + + assertFalse(mdvs17.get(1) instanceof RelationshipMetadataValue); + assertEquals("project 2 (mdv)", mdvs17.get(1).getValue()); + assertEquals(1, mdvs17.get(1).getPlace()); + + assertTrue(mdvs17.get(2) instanceof RelationshipMetadataValue); + assertEquals("project 5 (item)", 
mdvs17.get(2).getValue()); + assertEquals(2, mdvs17.get(2).getPlace()); + + assertFalse(mdvs17.get(3) instanceof RelationshipMetadataValue); + assertEquals("project 6 (mdv)", mdvs17.get(3).getValue()); + assertEquals(3, mdvs17.get(3).getPlace()); + + ///////////////////////////////////////// + // after remove 2 - verify project 3.2 // + ///////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, pr3_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(pe1_1, isProjectOfPerson, pr3_2, BOTH, 0, 0), + isRel(pe5_1, isProjectOfPerson, pr3_2, BOTH, 0, 3) + )) + ); + + List mdvs18 = itemService.getMetadata( + pr3_2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(5, mdvs18.size()); + + assertTrue(mdvs18.get(0) instanceof RelationshipMetadataValue); + assertEquals("person 1 (item)", mdvs18.get(0).getValue()); + assertEquals(0, mdvs18.get(0).getPlace()); + + assertFalse(mdvs18.get(1) instanceof RelationshipMetadataValue); + assertEquals("person 2 (mdv)", mdvs18.get(1).getValue()); + assertEquals(1, mdvs18.get(1).getPlace()); + + assertFalse(mdvs18.get(2) instanceof RelationshipMetadataValue); + assertEquals("person 4 (mdv)", mdvs18.get(2).getValue()); + assertEquals(2, mdvs18.get(2).getPlace()); + + assertTrue(mdvs18.get(3) instanceof RelationshipMetadataValue); + assertEquals("person 5 (item)", mdvs18.get(3).getValue()); + assertEquals(3, mdvs18.get(3).getPlace()); + + assertFalse(mdvs18.get(4) instanceof RelationshipMetadataValue); + assertEquals("person 6 (mdv)", mdvs18.get(4).getValue()); + assertEquals(4, mdvs18.get(4).getPlace()); + } + ); + } + + @Test + public void test_placeRecalculationNoUseForPlace() throws Exception { + // NOTE: this test uses relationship isIssueOfJournalVolume, because it adds virtual metadata + // on both sides of the relationship + + ////////////////// + // create items // + ////////////////// + + // journal volume 1.1 + Item v1_1 = ItemBuilder.createItem(context, collection) + 
.withTitle("journal volume 1") + .withMetadata("dspace", "entity", "type", journalVolumeEntityType.getLabel()) + .withMetadata("publicationvolume", "volumeNumber", null, "volume nr 1 (rel)") + .build(); + + // journal issue 1.1 + Item i1_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 1") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 1 (rel)") + .build(); + + // journal issue 2.1 + Item i2_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 2") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 2 (rel)") + .build(); + + // journal issue 3.1 + Item i3_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 3") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 3 (rel)") + .build(); + + // journal issue 4.1 + Item i4_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 4") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 4 (rel)") + .build(); + + // journal issue 5.1 + Item i5_1 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 5") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 5 (rel)") + .build(); + + ////////////////////////////////////////////// + // create relationships and metadata values // + ////////////////////////////////////////////// + + // relationship - volume 1 & issue 1 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i1_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 2 + 
RelationshipBuilder.createRelationshipBuilder(context, v1_1, i2_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 3 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i3_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 4 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i4_1, isIssueOfJournalVolume) + .build(); + + // relationship - volume 1 & issue 5 + RelationshipBuilder.createRelationshipBuilder(context, v1_1, i5_1, isIssueOfJournalVolume) + .build(); + + ///////////////////////////////// + // initial - verify volume 1.1 // + ///////////////////////////////// + + List mdvs1 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertTrue(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + assertTrue(mdvs1.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs1.get(2).getValue()); + assertEquals(2, mdvs1.get(2).getPlace()); + + assertTrue(mdvs1.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs1.get(3).getValue()); + assertEquals(3, mdvs1.get(3).getPlace()); + + assertTrue(mdvs1.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs1.get(4).getValue()); + assertEquals(4, mdvs1.get(4).getPlace()); + + ///////////////////////////////////// + // create new version - volume 1.2 // + ///////////////////////////////////// + + Item v1_2 = versioningService.createNewVersion(context, v1_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, v1_2)); + context.commit(); + + //////////////////////////////////// 
+ // create new version - issue 3.2 // + //////////////////////////////////// + + Item i3_2 = versioningService.createNewVersion(context, i3_1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, i3_2)); + context.commit(); + + //////////////////////////////////////////////// + // after version creation - verify volume 3.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i2_1, RIGHT_ONLY, 1, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i4_1, RIGHT_ONLY, 3, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs4 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertTrue(mdvs4.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + assertTrue(mdvs4.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs4.get(2).getValue()); + assertEquals(2, mdvs4.get(2).getPlace()); + + assertTrue(mdvs4.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs4.get(3).getValue()); + assertEquals(3, mdvs4.get(3).getPlace()); + + assertTrue(mdvs4.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs4.get(4).getValue()); + assertEquals(4, mdvs4.get(4).getPlace()); + + //////////////////////////////////////////////// + // after version creation - verify volume 1.2 // + 
//////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i2_1, BOTH, 1, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_2, BOTH, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i4_1, BOTH, 3, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 4, 0) + )) + ); + + List mdvs7 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertTrue(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + assertTrue(mdvs7.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs7.get(2).getValue()); + assertEquals(2, mdvs7.get(2).getPlace()); + + assertTrue(mdvs7.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs7.get(3).getValue()); + assertEquals(3, mdvs7.get(3).getPlace()); + + assertTrue(mdvs7.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs7.get(4).getValue()); + assertEquals(4, mdvs7.get(4).getPlace()); + + /////////////////////////////////////////////////////////// + // remove relationship - volume 1.2 & issue 3.2 // + // since an issue needs a relationship, delete the issue // + /////////////////////////////////////////////////////////// + + Relationship rel1 = getRelationship(v1_2, isIssueOfJournalVolume, i3_2); + assertNotNull(rel1); + + itemService.delete(context, context.reloadEntity(i3_2)); + + context.commit(); + + //////////////////////////////////// + // after 
remove 1 - cache busting // + //////////////////////////////////// + + v1_2.setMetadataModified(); + v1_2 = context.reloadEntity(v1_2); + + i3_2.setMetadataModified(); + i3_2 = context.reloadEntity(i3_2); + + //////////////////////////////////////// + // after remove 1 - verify volume 3.1 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i2_1, RIGHT_ONLY, 1, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i4_1, RIGHT_ONLY, 3, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs9 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertTrue(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + assertTrue(mdvs9.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs9.get(2).getValue()); + assertEquals(2, mdvs9.get(2).getPlace()); + + assertTrue(mdvs9.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs9.get(3).getValue()); + assertEquals(3, mdvs9.get(3).getPlace()); + + assertTrue(mdvs9.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs9.get(4).getValue()); + assertEquals(4, mdvs9.get(4).getPlace()); + + //////////////////////////////////////// + // after remove 1 - verify volume 3.2 // + //////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_2, -1, -1, false, false), + 
containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i2_1, BOTH, 1, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + // NOTE: left place was reduced by one + isRel(v1_2, isIssueOfJournalVolume, i4_1, BOTH, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 3, 0) + )) + ); + + List mdvs12 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(4, mdvs12.size()); + + assertTrue(mdvs12.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs12.get(0).getValue()); + assertEquals(0, mdvs12.get(0).getPlace()); + + assertTrue(mdvs12.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs12.get(1).getValue()); + assertEquals(1, mdvs12.get(1).getPlace()); + + assertTrue(mdvs12.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs12.get(2).getValue()); + assertEquals(2, mdvs12.get(2).getPlace()); + + assertTrue(mdvs12.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs12.get(3).getValue()); + assertEquals(3, mdvs12.get(3).getPlace()); + + //////////////////////////////////// + // create new version - issue 3.3 // + //////////////////////////////////// + + // journal issue 3.3 + Item i3_3 = ItemBuilder.createItem(context, collection) + .withTitle("journal issue 3") + .withMetadata("dspace", "entity", "type", journalIssueEntityType.getLabel()) + .withMetadata("publicationissue", "issueNumber", null, "issue nr 3 (rel)") + .build(); + + /////////////////////////////////////////////// + // add relationship - volume 1.2 & issue 3.3 // + /////////////////////////////////////////////// + + RelationshipBuilder.createRelationshipBuilder(context, v1_2, i3_3, isIssueOfJournalVolume, 2, -1) + .build(); + + context.commit(); + + //////////////////////////////////////////// + // after add relationship - cache busting // + 
//////////////////////////////////////////// + + v1_2.setMetadataModified(); + v1_2 = context.reloadEntity(v1_2); + + i3_3.setMetadataModified(); + i3_3 = context.reloadEntity(i3_3); + + //////////////////////////////////////////////// + // after add relationship - verify volume 1.1 // + //////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, v1_1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_1, isIssueOfJournalVolume, i1_1, RIGHT_ONLY, 0, 0), + isRel(v1_1, isIssueOfJournalVolume, i2_1, RIGHT_ONLY, 1, 0), + isRel(v1_1, isIssueOfJournalVolume, i3_1, RIGHT_ONLY, 2, 0), + isRel(v1_1, isIssueOfJournalVolume, i4_1, RIGHT_ONLY, 3, 0), + isRel(v1_1, isIssueOfJournalVolume, i5_1, RIGHT_ONLY, 4, 0) + )) + ); + + List mdvs14 = itemService.getMetadata( + v1_1, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs14.size()); + + assertTrue(mdvs14.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs14.get(0).getValue()); + assertEquals(0, mdvs14.get(0).getPlace()); + + assertTrue(mdvs14.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs14.get(1).getValue()); + assertEquals(1, mdvs14.get(1).getPlace()); + + assertTrue(mdvs14.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs14.get(2).getValue()); + assertEquals(2, mdvs14.get(2).getPlace()); + + assertTrue(mdvs14.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs14.get(3).getValue()); + assertEquals(3, mdvs14.get(3).getPlace()); + + assertTrue(mdvs14.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs14.get(4).getValue()); + assertEquals(4, mdvs14.get(4).getPlace()); + + //////////////////////////////////////////////// + // after add relationship - verify volume 1.2 // + //////////////////////////////////////////////// + + assertThat( + 
relationshipService.findByItem(context, v1_2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(v1_2, isIssueOfJournalVolume, i1_1, BOTH, 0, 0), + isRel(v1_2, isIssueOfJournalVolume, i2_1, BOTH, 1, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_1, LEFT_ONLY, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i3_3, BOTH, 2, 0), + isRel(v1_2, isIssueOfJournalVolume, i4_1, BOTH, 3, 0), + isRel(v1_2, isIssueOfJournalVolume, i5_1, BOTH, 4, 0) + )) + ); + + assertEquals( + 6, + relationshipService.countByItem(context, v1_2, false, false) + ); + + List mdvs17 = itemService.getMetadata( + v1_2, "publicationissue", "issueNumber", null, Item.ANY + ); + assertEquals(5, mdvs17.size()); + + assertTrue(mdvs17.get(0) instanceof RelationshipMetadataValue); + assertEquals("issue nr 1 (rel)", mdvs17.get(0).getValue()); + assertEquals(0, mdvs17.get(0).getPlace()); + + assertTrue(mdvs17.get(1) instanceof RelationshipMetadataValue); + assertEquals("issue nr 2 (rel)", mdvs17.get(1).getValue()); + assertEquals(1, mdvs17.get(1).getPlace()); + + assertTrue(mdvs17.get(2) instanceof RelationshipMetadataValue); + assertEquals("issue nr 3 (rel)", mdvs17.get(2).getValue()); + assertEquals(2, mdvs17.get(2).getPlace()); + + assertTrue(mdvs17.get(3) instanceof RelationshipMetadataValue); + assertEquals("issue nr 4 (rel)", mdvs17.get(3).getValue()); + assertEquals(3, mdvs17.get(3).getPlace()); + + assertTrue(mdvs17.get(4) instanceof RelationshipMetadataValue); + assertEquals("issue nr 5 (rel)", mdvs17.get(4).getValue()); + assertEquals(4, mdvs17.get(4).getPlace()); + + ///////////////////////////////////////////// + // delete volume first for min cardinality // + ///////////////////////////////////////////// + + itemService.delete(context, context.reloadEntity(v1_1)); + itemService.delete(context, context.reloadEntity(v1_2)); + } + + protected void verifySolrField(Item item, String fieldName, List expectedValues) throws Exception { + QueryResponse result = solrSearchCore.getSolr().query(new 
SolrQuery(String.format( + "search.resourcetype:\"Item\" AND search.resourceid:\"%s\"", item.getID() + ))); + + SolrDocumentList docs = result.getResults(); + Assert.assertEquals(1, docs.size()); + SolrDocument doc = docs.get(0); + + java.util.Collection actualValues = doc.getFieldValues(fieldName); + + if (expectedValues == null) { + assertNull(actualValues); + } else { + assertThat(actualValues, containsInAnyOrder(expectedValues.toArray())); + } + } + + /** + * Setup: + * - two people are linked to one publication + * - create a new version of the publication + * - create a new version of person 1 + * - create a new version of person 2 + * + * Goals: + * - check that the metadata (plain text and from relationships) of the items have the correct value and place, + * as new versions of the items get created and edited + * - verify that changes to newer versions and relationships don't affect older versions and relationships + * - verify that the (versions of) items are properly indexed in the Solr search core + */ + @Test + public void test_virtualMetadataPreserved() throws Exception { + ////////////////////////////////////////////// + // create a publication and link two people // + ////////////////////////////////////////////// + + Item publication1V1 = ItemBuilder.createItem(context, collection) + .withTitle("publication 1V1") + .withMetadata("dspace", "entity", "type", publicationEntityType.getLabel()) + .build(); + + Item person1V1 = ItemBuilder.createItem(context, collection) + .withTitle("person 1V1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("Donald") + .withPersonIdentifierLastName("Smith") + .build(); + + Item person2V1 = ItemBuilder.createItem(context, collection) + .withTitle("person 2V1") + .withMetadata("dspace", "entity", "type", personEntityType.getLabel()) + .withPersonIdentifierFirstName("Jane") + .withPersonIdentifierLastName("Doe") + .build(); + + 
RelationshipBuilder.createRelationshipBuilder(context, publication1V1, person1V1, isAuthorOfPublication) + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, publication1V1, person2V1, isAuthorOfPublication) + .withRightwardValue("Doe, J.") + .build(); + + /////////////////////////////////////////////// + // test dc.contributor.author of publication // + /////////////////////////////////////////////// + + List mdvs1 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs1.size()); + + assertTrue(mdvs1.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs1.get(0).getValue()); + assertEquals(0, mdvs1.get(0).getPlace()); + + assertTrue(mdvs1.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs1.get(1).getValue()); + assertEquals(1, mdvs1.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of publication // + //////////////////////////////////////////////////////// + + List mdvsR1 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR1.size()); + + assertTrue(mdvsR1.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR1.get(0).getValue()); + assertEquals(0, mdvsR1.get(0).getPlace()); + + assertTrue(mdvsR1.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR1.get(1).getValue()); + assertEquals(1, mdvsR1.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of publication 
// + /////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////// + // create a new version of publication 1 and archive // + /////////////////////////////////////////////////////// + + Item publication1V2 = versioningService.createNewVersion(context, publication1V1).getItem(); + installItemService.installItem(context, workspaceItemService.findByItem(context, publication1V2)); + context.dispatchEvents(); + + //////////////////////////////////// + // create new version of person 1 // + //////////////////////////////////// + + Item person1V2 = versioningService.createNewVersion(context, person1V1).getItem(); + // update "Smith, Donald" to "Smith, D." 
+ itemService.replaceMetadata( + context, person1V2, "person", "givenName", null, null, "D.", + null, -1, 0 + ); + itemService.update(context, person1V2); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs2 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs2.size()); + + assertTrue(mdvs2.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs2.get(0).getValue()); + assertEquals(0, mdvs2.get(0).getPlace()); + + assertTrue(mdvs2.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs2.get(1).getValue()); + assertEquals(1, mdvs2.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." 
+ )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of old publication // + //////////////////////////////////////////////////////////// + + List mdvsR2 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR2.size()); + + assertTrue(mdvsR2.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR2.get(0).getValue()); + assertEquals(0, mdvsR2.get(0).getPlace()); + + assertTrue(mdvsR2.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR2.get(1).getValue()); + assertEquals(1, mdvsR2.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of old publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, BOTH, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs3 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", 
Item.ANY + ); + assertEquals(2, mdvs3.size()); + + assertTrue(mdvs3.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs3.get(0).getValue()); + assertEquals(0, mdvs3.get(0).getPlace()); + + assertTrue(mdvs3.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs3.get(1).getValue()); + assertEquals(1, mdvs3.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR3 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR3.size()); + + assertTrue(mdvsR3.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR3.get(0).getValue()); + assertEquals(0, mdvsR3.get(0).getPlace()); + + assertTrue(mdvsR3.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR3.get(1).getValue()); + assertEquals(1, mdvsR3.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + 
instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), person2V1.getID().toString() + )); + + ///////////////////////////////////// + // archive new version of person 1 // + ///////////////////////////////////// + + installItemService.installItem(context, workspaceItemService.findByItem(context, person1V2)); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs4 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs4.size()); + + assertTrue(mdvs4.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs4.get(0).getValue()); + assertEquals(0, mdvs4.get(0).getPlace()); + + assertTrue(mdvs4.get(1) instanceof 
RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs4.get(1).getValue()); + assertEquals(1, mdvs4.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of publication // + //////////////////////////////////////////////////////// + + List mdvsR4 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR4.size()); + + assertTrue(mdvsR4.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR4.get(0).getValue()); + assertEquals(0, mdvsR4.get(0).getPlace()); + + assertTrue(mdvsR4.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR4.get(1).getValue()); + assertEquals(1, mdvsR4.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of publication // + /////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, 
BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, BOTH, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs5 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs5.size()); + + assertTrue(mdvs5.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, D.", mdvs5.get(0).getValue()); + assertEquals(0, mdvs5.get(0).getPlace()); + + assertTrue(mdvs5.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs5.get(1).getValue()); + assertEquals(1, mdvs5.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, D.", "Doe, J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR5 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR5.size()); + + assertTrue(mdvsR5.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V2.getID().toString(), mdvsR5.get(0).getValue()); + assertEquals(0, mdvsR5.get(0).getPlace()); + + assertTrue(mdvsR5.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR5.get(1).getValue()); + assertEquals(1, mdvsR5.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V2.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + 
hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), person2V1.getID().toString() + )); + + //////////////////////////////////// + // create new version of person 2 // + //////////////////////////////////// + + Item person2V2 = versioningService.createNewVersion(context, person2V1).getItem(); + Relationship rel1 = getRelationship(publication1V2, isAuthorOfPublication, person2V2); + assertNotNull(rel1); + rel1.setRightwardValue("Doe, Jane Jr"); + relationshipService.update(context, rel1); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + 
List mdvs6 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs6.size()); + + assertTrue(mdvs6.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs6.get(0).getValue()); + assertEquals(0, mdvs6.get(0).getPlace()); + + assertTrue(mdvs6.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs6.get(1).getValue()); + assertEquals(1, mdvs6.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of old publication // + //////////////////////////////////////////////////////////// + + List mdvsR6 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR6.size()); + + assertTrue(mdvsR6.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR6.get(0).getValue()); + assertEquals(0, mdvsR6.get(0).getPlace()); + + assertTrue(mdvsR6.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR6.get(1).getValue()); + assertEquals(1, mdvsR6.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of old publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // 
test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, BOTH, null, "Doe, J.", 1, 0), + isRel(publication1V2, isAuthorOfPublication, person2V2, LEFT_ONLY, null, "Doe, Jane Jr", 1, 0) + )) + ); + + List mdvs7 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs7.size()); + + assertTrue(mdvs7.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, D.", mdvs7.get(0).getValue()); + assertEquals(0, mdvs7.get(0).getPlace()); + + assertTrue(mdvs7.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs7.get(1).getValue()); + assertEquals(1, mdvs7.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, D.", "Doe, J." 
+ )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR7 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR7.size()); + + assertTrue(mdvsR7.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V2.getID().toString(), mdvsR7.get(0).getValue()); + assertEquals(0, mdvsR7.get(0).getPlace()); + + assertTrue(mdvsR7.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR7.get(1).getValue()); + assertEquals(1, mdvsR7.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V2.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + 
instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), + person2V1.getID().toString(), person2V2.getID().toString() + )); + + ///////////////////////////////////// + // archive new version of person 2 // + ///////////////////////////////////// + + installItemService.installItem(context, workspaceItemService.findByItem(context, person2V2)); + context.dispatchEvents(); + + /////////////////// + // cache busting // + /////////////////// + + publication1V1.setMetadataModified(); + publication1V1 = context.reloadEntity(publication1V1); + + publication1V2.setMetadataModified(); + publication1V2 = context.reloadEntity(publication1V2); + + /////////////////////////////////////////////////// + // test dc.contributor.author of old publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V1, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V1, isAuthorOfPublication, person1V1, RIGHT_ONLY, 0, 0), + isRel(publication1V1, isAuthorOfPublication, person2V1, RIGHT_ONLY, null, "Doe, J.", 1, 0) + )) + ); + + List mdvs8 = itemService.getMetadata( + publication1V1, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs8.size()); + + assertTrue(mdvs8.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, Donald", mdvs8.get(0).getValue()); + assertEquals(0, mdvs8.get(0).getPlace()); + + assertTrue(mdvs8.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, J.", mdvs8.get(1).getValue()); + assertEquals(1, mdvs8.get(1).getPlace()); + + verifySolrField(publication1V1, "dc.contributor.author", List.of( + "Smith, Donald", "Doe, 
J." + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of old publication // + //////////////////////////////////////////////////////////// + + List mdvsR8 = itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR8.size()); + + assertTrue(mdvsR8.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V1.getID().toString(), mdvsR8.get(0).getValue()); + assertEquals(0, mdvsR8.get(0).getPlace()); + + assertTrue(mdvsR8.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V1.getID().toString(), mdvsR8.get(1).getValue()); + assertEquals(1, mdvsR8.get(1).getPlace()); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication", List.of( + person1V1.getID().toString(), person2V1.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of old publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V1, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder() + ); + + verifySolrField(publication1V1, "relation.isAuthorOfPublication.latestForDiscovery", null); + + /////////////////////////////////////////////////// + // test dc.contributor.author of new publication // + /////////////////////////////////////////////////// + + assertThat( + relationshipService.findByItem(context, publication1V2, -1, -1, false, false), + containsInAnyOrder(List.of( + isRel(publication1V2, isAuthorOfPublication, person1V1, LEFT_ONLY, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person1V2, BOTH, 0, 0), + isRel(publication1V2, isAuthorOfPublication, person2V1, LEFT_ONLY, null, "Doe, J.", 1, 0), + isRel(publication1V2, isAuthorOfPublication, person2V2, BOTH, null, "Doe, Jane Jr", 1, 0) + 
)) + ); + + List mdvs9 = itemService.getMetadata( + publication1V2, "dc", "contributor", "author", Item.ANY + ); + assertEquals(2, mdvs9.size()); + + assertTrue(mdvs9.get(0) instanceof RelationshipMetadataValue); + assertEquals("Smith, D.", mdvs9.get(0).getValue()); + assertEquals(0, mdvs9.get(0).getPlace()); + + assertTrue(mdvs9.get(1) instanceof RelationshipMetadataValue); + assertEquals("Doe, Jane Jr", mdvs9.get(1).getValue()); + assertEquals(1, mdvs9.get(1).getPlace()); + + verifySolrField(publication1V2, "dc.contributor.author", List.of( + "Smith, D.", "Doe, Jane Jr" + )); + + //////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication of new publication // + //////////////////////////////////////////////////////////// + + List mdvsR9 = itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", null, Item.ANY + ); + assertEquals(2, mdvsR9.size()); + + assertTrue(mdvsR9.get(0) instanceof RelationshipMetadataValue); + assertEquals(person1V2.getID().toString(), mdvsR9.get(0).getValue()); + assertEquals(0, mdvsR9.get(0).getPlace()); + + assertTrue(mdvsR9.get(1) instanceof RelationshipMetadataValue); + assertEquals(person2V2.getID().toString(), mdvsR9.get(1).getValue()); + assertEquals(1, mdvsR9.get(1).getPlace()); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication", List.of( + person1V2.getID().toString(), person2V2.getID().toString() + )); + + /////////////////////////////////////////////////////////////////////////////// + // test relation.isAuthorOfPublication.latestForDiscovery of new publication // + /////////////////////////////////////////////////////////////////////////////// + + assertThat( + itemService.getMetadata( + publication1V2, "relation", "isAuthorOfPublication", "latestForDiscovery", Item.ANY + ), + containsInAnyOrder( + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V1.getID().toString())), + hasProperty("place", is(-1)), + 
hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person1V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V1.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ), + allOf( + instanceOf(RelationshipMetadataValue.class), + hasProperty("value", is(person2V2.getID().toString())), + hasProperty("place", is(-1)), + hasProperty("authority", startsWith(Constants.VIRTUAL_AUTHORITY_PREFIX)) + ) + ) + ); + + verifySolrField(publication1V2, "relation.isAuthorOfPublication.latestForDiscovery", List.of( + person1V1.getID().toString(), person1V2.getID().toString(), + person2V1.getID().toString(), person2V2.getID().toString() + )); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/content/WorkspaceItemTest.java b/dspace-api/src/test/java/org/dspace/content/WorkspaceItemTest.java index 609768bf6786..d018a15f9765 100644 --- a/dspace-api/src/test/java/org/dspace/content/WorkspaceItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/WorkspaceItemTest.java @@ -14,6 +14,8 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; @@ -33,12 +35,15 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.core.Constants; +import org.dspace.core.Context; import org.dspace.eperson.EPerson; import org.dspace.eperson.factory.EPersonServiceFactory; import 
org.dspace.eperson.service.EPersonService; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; import org.springframework.test.util.ReflectionTestUtils; /** @@ -46,6 +51,7 @@ * * @author pvillega */ +@RunWith(MockitoJUnitRunner.class) public class WorkspaceItemTest extends AbstractUnitTest { /** @@ -98,6 +104,7 @@ public void init() { // "Wire" our spy to be used by the current loaded object services // (To ensure these services use the spy instead of the real service) ReflectionTestUtils.setField(workspaceItemService, "authorizeService", authorizeServiceSpy); + ReflectionTestUtils.setField(itemService, "authorizeService", authorizeServiceSpy); ReflectionTestUtils.setField(collectionService, "authorizeService", authorizeServiceSpy); ReflectionTestUtils.setField(communityService, "authorizeService", authorizeServiceSpy); } catch (AuthorizeException ex) { @@ -158,7 +165,8 @@ public void testFind() throws Exception { @Test public void testCreateAuth() throws Exception { // Allow Collection ADD perms - doNothing().when(authorizeServiceSpy).authorizeAction(context, collection, Constants.ADD); + doNothing().when(authorizeServiceSpy).authorizeAction(any(Context.class), + any(Collection.class), eq(Constants.ADD)); boolean template; WorkspaceItem created; diff --git a/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java b/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java index 77cf105dd40a..255b070e5eac 100644 --- a/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java +++ b/dspace-api/src/test/java/org/dspace/content/authority/DSpaceControlledVocabularyTest.java @@ -13,7 +13,6 @@ import java.io.IOException; import org.dspace.AbstractDSpaceTest; -import org.dspace.content.Collection; import org.dspace.core.factory.CoreServiceFactory; import org.junit.After; import 
org.junit.AfterClass; @@ -66,6 +65,8 @@ public void testGetPluginNames() /** * Test of getMatches method, of class DSpaceControlledVocabulary. + * @throws java.io.IOException passed through. + * @throws java.lang.ClassNotFoundException passed through. */ @Test public void testGetMatches() throws IOException, ClassNotFoundException { @@ -74,9 +75,7 @@ public void testGetMatches() throws IOException, ClassNotFoundException { final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority"; // Ensure that 'id' attribute is optional - String field = null; // not used String text = "north 40"; - Collection collection = null; int start = 0; int limit = 10; String locale = null; @@ -87,7 +86,7 @@ public void testGetMatches() throws IOException, ClassNotFoundException { CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(Class.forName(PLUGIN_INTERFACE), "farm"); assertNotNull(instance); Choices result = instance.getMatches(text, start, limit, locale); - assertEquals("the farm::north 40", result.values[0].value); + assertEquals("north 40", result.values[0].value); } /** diff --git a/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java b/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java index cd52800a92c6..2eafc03986a7 100644 --- a/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java +++ b/dspace-api/src/test/java/org/dspace/content/crosswalk/QDCCrosswalkTest.java @@ -14,7 +14,7 @@ import org.dspace.core.service.PluginService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; -import org.jdom.Namespace; +import org.jdom2.Namespace; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -76,6 +76,8 @@ public static void tearDownClass() { @Before public void setUp() { + // make sure that the config properties set in @BeforeClass are picked up + QDCCrosswalk.initStatic(); } @After diff --git 
a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java new file mode 100644 index 000000000000..2d08223b2e3e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplIT.java @@ -0,0 +1,165 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.dao; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTest; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.EntityTypeService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.content.service.WorkspaceItemService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Created by: Andrew Wood + * Date: 20 Sep 2019 + */ +public class RelationshipDAOImplIT extends AbstractIntegrationTest { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplIT.class); + + private Relationship relationship; + + private Item itemOne; + + private Item itemTwo; + + 
private Collection collection; + + private Community owningCommunity; + + private RelationshipType relationshipType; + + private List relationshipsList = new ArrayList<>(); + + private EntityType entityTypeOne; + + private EntityType entityTypeTwo; + + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected RelationshipTypeService relationshipTypeService = + ContentServiceFactory.getInstance().getRelationshipTypeService(); + protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + + /** + * Initalize DSpace objects used for testing for each test + */ + @Before + @Override + public void init() { + super.init(); + try { + // Create objects for testing + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + WorkspaceItem workspaceItemTwo = workspaceItemService.create(context, collection, false); + itemOne = installItemService.installItem(context, workspaceItem); + itemTwo = installItemService.installItem(context, workspaceItemTwo); + itemService.addMetadata(context, itemOne, "dspace", "entity", "type", Item.ANY, "Publication"); + itemService.addMetadata(context, itemTwo, "dspace", "entity", "type", Item.ANY, "Person"); 
+ itemService.update(context, itemOne); + itemService.update(context, itemTwo); + entityTypeOne = entityTypeService.create(context, "Person"); + entityTypeTwo = entityTypeService.create(context, "Publication"); + relationshipType = relationshipTypeService.create(context, entityTypeTwo, entityTypeOne, + "isAuthorOfPublication", "isPublicationOfAuthor",0,10,0,10); + relationship = relationshipService.create(context, itemOne, itemTwo, relationshipType, 0, 0); + relationshipService.update(context, relationship); + relationshipsList.add(relationship); + context.restoreAuthSystemState(); + } catch (Exception e) { + log.error(e); + fail(e.getMessage()); + } + } + + /** + * Delete all initalized DSpace objects after each test + */ + @After + @Override + public void destroy() { + try { + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship); + relationshipTypeService.delete(context, relationshipType); + entityTypeService.delete(context, entityTypeTwo); + entityTypeService.delete(context, entityTypeOne); + itemService.delete(context, itemOne); + itemService.delete(context, itemTwo); + } catch (Exception e) { + log.error(e); + fail(e.getMessage()); + } + super.destroy(); + + } + + /** + * Test findItem should return our defined relationshipsList given our test Item itemOne. 
+ * + * @throws Exception + */ + @Test + public void testFindByItem() throws Exception { + assertEquals("TestFindByItem 0", relationshipsList, relationshipService.findByItem(context, itemOne, + -1, -1, false)); + } + + /** + * Test findByRelationshipType should return our defined relationshipsList given our test RelationshipType + * relationshipType + * + * @throws Exception + */ + @Test + public void testFindByRelationshipType() throws Exception { + assertEquals("TestByRelationshipType 0", relationshipsList, relationshipService.findByRelationshipType(context, + relationshipType)); + } + + /** + * Test countTotal should return our defined relationshipsList's size given our test Context + * context + * + * @throws Exception + */ + @Test + public void testCountRows() throws Exception { + assertEquals("TestByRelationshipType 0", relationshipsList.size(), relationshipService.countTotal(context)); + } + + +} diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java deleted file mode 100644 index 2143090fcf9e..000000000000 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipDAOImplTest.java +++ /dev/null @@ -1,187 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.content.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.logging.log4j.Logger; -import org.dspace.AbstractIntegrationTest; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.EntityType; -import org.dspace.content.Item; -import org.dspace.content.Relationship; -import org.dspace.content.RelationshipType; -import 
org.dspace.content.WorkspaceItem; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.EntityTypeService; -import org.dspace.content.service.InstallItemService; -import org.dspace.content.service.ItemService; -import org.dspace.content.service.RelationshipService; -import org.dspace.content.service.RelationshipTypeService; -import org.dspace.content.service.WorkspaceItemService; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -/** - * Created by: Andrew Wood - * Date: 20 Sep 2019 - */ -public class RelationshipDAOImplTest extends AbstractIntegrationTest { - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipDAOImplTest.class); - - private Relationship relationship; - - private Item itemOne; - - private Item itemTwo; - - private Collection collection; - - private Community owningCommunity; - - private RelationshipType relationshipType; - - private List relationshipsList = new ArrayList<>(); - - private EntityType entityTypeOne; - - private EntityType entityTypeTwo; - - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); - protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - protected RelationshipTypeService relationshipTypeService = - ContentServiceFactory.getInstance().getRelationshipTypeService(); - protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); - protected 
EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); - - /** - * Initalize DSpace objects used for testing for each test - */ - @Before - @Override - public void init() { - super.init(); - try { - // Create objects for testing - context.turnOffAuthorisationSystem(); - owningCommunity = communityService.create(null, context); - collection = collectionService.create(context, owningCommunity); - WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); - WorkspaceItem workspaceItemTwo = workspaceItemService.create(context, collection, false); - itemOne = installItemService.installItem(context, workspaceItem); - itemTwo = installItemService.installItem(context, workspaceItemTwo); - itemService.addMetadata(context, itemOne, "dspace", "entity", "type", Item.ANY, "Publication"); - itemService.addMetadata(context, itemTwo, "dspace", "entity", "type", Item.ANY, "Person"); - itemService.update(context, itemOne); - itemService.update(context, itemTwo); - entityTypeOne = entityTypeService.create(context, "Person"); - entityTypeTwo = entityTypeService.create(context, "Publication"); - relationshipType = relationshipTypeService.create(context, entityTypeTwo, entityTypeOne, - "isAuthorOfPublication", "isPublicationOfAuthor",0,10,0,10); - relationship = relationshipService.create(context, itemOne, itemTwo, relationshipType, 0, 0); - relationshipService.update(context, relationship); - relationshipsList.add(relationship); - context.restoreAuthSystemState(); - } catch (Exception e) { - log.error(e); - fail(e.getMessage()); - } - } - - /** - * Delete all initalized DSpace objects after each test - */ - @After - @Override - public void destroy() { - try { - context.turnOffAuthorisationSystem(); - relationshipService.delete(context, relationship); - relationshipTypeService.delete(context, relationshipType); - entityTypeService.delete(context, entityTypeTwo); - entityTypeService.delete(context, entityTypeOne); - 
itemService.delete(context, itemOne); - itemService.delete(context, itemTwo); - } catch (Exception e) { - log.error(e); - fail(e.getMessage()); - } - super.destroy(); - - } - - /** - * Test findItem should return our defined relationshipsList given our test Item itemOne. - * - * @throws Exception - */ - @Test - public void testFindByItem() throws Exception { - assertEquals("TestFindByItem 0", relationshipsList, relationshipService.findByItem(context, itemOne, - -1, -1, false)); - } - - /** - * Test findNextLeftPlaceByLeftItem should return 0 given our test left Item itemOne. - * - * @throws Exception - */ - @Test - public void testFindNextLeftPlaceByLeftItem() throws Exception { - assertEquals("TestNextLeftPlaceByLeftItem 0", 1, relationshipService.findNextLeftPlaceByLeftItem(context, - itemOne)); - } - - /** - * Test findNextRightPlaceByRightItem should return 0 given our test right Item itemTwo. - * - * @throws Exception - */ - @Test - public void testFindNextRightPlaceByRightItem() throws Exception { - assertEquals("TestNextRightPlaceByRightItem 0", 1, relationshipService.findNextRightPlaceByRightItem(context, - itemTwo)); - } - - /** - * Test findByRelationshipType should return our defined relationshipsList given our test RelationshipType - * relationshipType - * - * @throws Exception - */ - @Test - public void testFindByRelationshipType() throws Exception { - assertEquals("TestByRelationshipType 0", relationshipsList, relationshipService.findByRelationshipType(context, - relationshipType)); - } - - /** - * Test countTotal should return our defined relationshipsList's size given our test Context - * context - * - * @throws Exception - */ - @Test - public void testCountRows() throws Exception { - assertEquals("TestByRelationshipType 0", relationshipsList.size(), relationshipService.countTotal(context)); - } - - -} diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java 
b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java new file mode 100644 index 000000000000..ff7d03b49f6d --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplIT.java @@ -0,0 +1,161 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.dao; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTest; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.EntityTypeService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.RelationshipService; +import org.dspace.content.service.RelationshipTypeService; +import org.dspace.content.service.WorkspaceItemService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class RelationshipTypeDAOImplIT extends AbstractIntegrationTest { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplIT.class); + + private Relationship relationship; + + private Item itemOne; + + private Item itemTwo; + + private Collection collection; + + private Community owningCommunity; + + private RelationshipType relationshipType; + + 
private List relationshipTypeList = new ArrayList<>(); + + private EntityType entityTypeOne; + + private EntityType entityTypeTwo; + + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected RelationshipTypeService relationshipTypeService = + ContentServiceFactory.getInstance().getRelationshipTypeService(); + protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + + /** + * Initalize DSpace objects used for testing for each test + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false); + WorkspaceItem workspaceItemTwo = workspaceItemService.create(context, collection, false); + itemOne = installItemService.installItem(context, workspaceItem); + itemTwo = installItemService.installItem(context, workspaceItemTwo); + itemService.addMetadata(context, itemOne, "dspace", "entity", "type", Item.ANY, "Publication"); + itemService.addMetadata(context, itemTwo, "dspace", "entity", "type", Item.ANY, "Person"); + itemService.update(context, itemOne); + itemService.update(context, itemTwo); + entityTypeOne = entityTypeService.create(context, "Person"); + 
entityTypeTwo = entityTypeService.create(context, "Publication"); + relationshipType = relationshipTypeService.create(context, entityTypeTwo, entityTypeOne, + "isAuthorOfPublication", "isPublicationOfAuthor",0,10,0,10); + relationship = relationshipService.create(context, itemOne, itemTwo, relationshipType, 0, 0); + relationshipService.update(context, relationship); + relationshipTypeList.add(relationshipType); + context.restoreAuthSystemState(); + } catch (Exception e) { + log.error(e); + fail(e.getMessage()); + } + } + + /** + * Delete all initalized DSpace objects after each test + */ + @After + @Override + public void destroy() { + try { + // Cleanup newly created objects + context.turnOffAuthorisationSystem(); + relationshipService.delete(context, relationship); + relationshipTypeService.delete(context, relationshipType); + entityTypeService.delete(context, entityTypeTwo); + entityTypeService.delete(context, entityTypeOne); + itemService.delete(context, itemOne); + itemService.delete(context, itemTwo); + } catch (Exception e) { + log.error(e); + fail(e.getMessage()); + } + super.destroy(); + + } + + /** + * Test findbyTypesAndLabels should return our defined RelationshipType given our test Entities entityTypeTwo and + * entityTypeOne with the affiliated labels isAuthorOfPublication and isPublicationOfAuthor + * + * @throws Exception + */ + @Test + public void testFindByTypesAndLabels() throws Exception { + assertEquals("TestFindbyTypesAndLabels 0", relationshipType, relationshipTypeService + .findbyTypesAndTypeName(context, entityTypeTwo, entityTypeOne, "isAuthorOfPublication", + "isPublicationOfAuthor")); + } + + /** + * Test findByLeftOrRightLabel should return our defined relationshipTypeList given one of our affiliated labels + * + * @throws Exception + */ + @Test + public void testFindByLeftOrRightLabel() throws Exception { + assertEquals("TestFindByLeftOrRightLabel 0", relationshipTypeList, relationshipTypeService. 
+ findByLeftwardOrRightwardTypeName(context, "isAuthorOfPublication", -1, -1)); + } + + /** + * Test findByEntityType should return our defined relationshipsList given one our defined EntityTypes + * entityTypeOne + * + * @throws Exception + */ + @Test + public void testFindByEntityType() throws Exception { + assertEquals("TestFindByEntityType 0", relationshipTypeList, relationshipTypeService.findByEntityType(context, + entityTypeOne)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java b/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java deleted file mode 100644 index 3fff6fec4762..000000000000 --- a/dspace-api/src/test/java/org/dspace/content/dao/RelationshipTypeDAOImplTest.java +++ /dev/null @@ -1,161 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.content.dao; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -import java.util.ArrayList; -import java.util.List; - -import org.apache.logging.log4j.Logger; -import org.dspace.AbstractIntegrationTest; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.EntityType; -import org.dspace.content.Item; -import org.dspace.content.Relationship; -import org.dspace.content.RelationshipType; -import org.dspace.content.WorkspaceItem; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.EntityTypeService; -import org.dspace.content.service.InstallItemService; -import org.dspace.content.service.ItemService; -import org.dspace.content.service.RelationshipService; -import 
org.dspace.content.service.RelationshipTypeService; -import org.dspace.content.service.WorkspaceItemService; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class RelationshipTypeDAOImplTest extends AbstractIntegrationTest { - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(RelationshipTypeDAOImplTest.class); - - private Relationship relationship; - - private Item itemOne; - - private Item itemTwo; - - private Collection collection; - - private Community owningCommunity; - - private RelationshipType relationshipType; - - private List relationshipTypeList = new ArrayList<>(); - - private EntityType entityTypeOne; - - private EntityType entityTypeTwo; - - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); - protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - protected RelationshipTypeService relationshipTypeService = - ContentServiceFactory.getInstance().getRelationshipTypeService(); - protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); - protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); - - /** - * Initalize DSpace objects used for testing for each test - */ - @Before - @Override - public void init() { - super.init(); - try { - context.turnOffAuthorisationSystem(); - owningCommunity = communityService.create(null, context); - collection = collectionService.create(context, owningCommunity); - WorkspaceItem workspaceItem = 
workspaceItemService.create(context, collection, false); - WorkspaceItem workspaceItemTwo = workspaceItemService.create(context, collection, false); - itemOne = installItemService.installItem(context, workspaceItem); - itemTwo = installItemService.installItem(context, workspaceItemTwo); - itemService.addMetadata(context, itemOne, "dspace", "entity", "type", Item.ANY, "Publication"); - itemService.addMetadata(context, itemTwo, "dspace", "entity", "type", Item.ANY, "Person"); - itemService.update(context, itemOne); - itemService.update(context, itemTwo); - entityTypeOne = entityTypeService.create(context, "Person"); - entityTypeTwo = entityTypeService.create(context, "Publication"); - relationshipType = relationshipTypeService.create(context, entityTypeTwo, entityTypeOne, - "isAuthorOfPublication", "isPublicationOfAuthor",0,10,0,10); - relationship = relationshipService.create(context, itemOne, itemTwo, relationshipType, 0, 0); - relationshipService.update(context, relationship); - relationshipTypeList.add(relationshipType); - context.restoreAuthSystemState(); - } catch (Exception e) { - log.error(e); - fail(e.getMessage()); - } - } - - /** - * Delete all initalized DSpace objects after each test - */ - @After - @Override - public void destroy() { - try { - // Cleanup newly created objects - context.turnOffAuthorisationSystem(); - relationshipService.delete(context, relationship); - relationshipTypeService.delete(context, relationshipType); - entityTypeService.delete(context, entityTypeTwo); - entityTypeService.delete(context, entityTypeOne); - itemService.delete(context, itemOne); - itemService.delete(context, itemTwo); - } catch (Exception e) { - log.error(e); - fail(e.getMessage()); - } - super.destroy(); - - } - - /** - * Test findbyTypesAndLabels should return our defined RelationshipType given our test Entities entityTypeTwo and - * entityTypeOne with the affiliated labels isAuthorOfPublication and isPublicationOfAuthor - * - * @throws Exception - */ - @Test - 
public void testFindByTypesAndLabels() throws Exception { - assertEquals("TestFindbyTypesAndLabels 0", relationshipType, relationshipTypeService - .findbyTypesAndTypeName(context, entityTypeTwo, entityTypeOne, "isAuthorOfPublication", - "isPublicationOfAuthor")); - } - - /** - * Test findByLeftOrRightLabel should return our defined relationshipTypeList given one of our affiliated labels - * - * @throws Exception - */ - @Test - public void testFindByLeftOrRightLabel() throws Exception { - assertEquals("TestFindByLeftOrRightLabel 0", relationshipTypeList, relationshipTypeService. - findByLeftwardOrRightwardTypeName(context, "isAuthorOfPublication", -1, -1)); - } - - /** - * Test findByEntityType should return our defined relationshipsList given one our defined EntityTypes - * entityTypeOne - * - * @throws Exception - */ - @Test - public void testFindByEntityType() throws Exception { - assertEquals("TestFindByEntityType 0", relationshipTypeList, relationshipTypeService.findByEntityType(context, - entityTypeOne)); - } -} diff --git a/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java b/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java new file mode 100644 index 000000000000..0e0864622043 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/logic/LogicalFilterTest.java @@ -0,0 +1,654 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.logic; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + 
+import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.MetadataField; +import org.dspace.content.MetadataSchemaEnum; +import org.dspace.content.MetadataValue; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.logic.condition.BitstreamCountCondition; +import org.dspace.content.logic.condition.Condition; +import org.dspace.content.logic.condition.InCollectionCondition; +import org.dspace.content.logic.condition.InCommunityCondition; +import org.dspace.content.logic.condition.IsWithdrawnCondition; +import org.dspace.content.logic.condition.MetadataValueMatchCondition; +import org.dspace.content.logic.condition.MetadataValuesMatchCondition; +import org.dspace.content.logic.condition.ReadableByGroupCondition; +import org.dspace.content.logic.operator.And; +import org.dspace.content.logic.operator.Nand; +import org.dspace.content.logic.operator.Nor; +import org.dspace.content.logic.operator.Not; +import org.dspace.content.logic.operator.Or; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.MetadataFieldService; +import org.dspace.content.service.MetadataValueService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; 
+import org.dspace.eperson.service.GroupService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for logical filters, conditions and operators + * @author Kim Shepherd + */ +public class LogicalFilterTest extends AbstractUnitTest { + // Required services + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); + protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + private MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); + private MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); + private AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + + // Logger + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(LogicalFilterTest.class); + + // Items and repository structure for testing + Community communityOne; + Community communityTwo; + Collection collectionOne; + Collection collectionTwo; + Item itemOne; + Item itemTwo; + Item itemThree; + + // Some simple statement lists for testing + List trueStatements; + List trueFalseStatements; + List falseStatements; + LogicalStatement trueStatementOne; + LogicalStatement 
falseStatementOne; + + // Field and values used to set title metadata + String element = "title"; + String qualifier = null; + MetadataField metadataField; + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. + * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + // Set up logical statement lists for operator testing + setUpStatements(); + // Set up DSpace resources for condition and filter testing + // Set up first community, collection and item + this.communityOne = communityService.create(null, context); + this.collectionOne = collectionService.create(context, communityOne); + WorkspaceItem workspaceItem = workspaceItemService.create(context, collectionOne, false); + this.itemOne = installItemService.installItem(context, workspaceItem); + // Add one bitstream to item one, but put it in THUMBNAIL bundle + bundleService.addBitstream(context, bundleService.create(context, itemOne, "THUMBNAIL"), + bitstreamService.create(context, + new ByteArrayInputStream("Item 1 Thumbnail 1".getBytes(StandardCharsets.UTF_8)))); + // Set up second community, collection and item, and third item + this.communityTwo = communityService.create(null, context); + this.collectionTwo = collectionService.create(context, communityTwo); + // Item two + workspaceItem = workspaceItemService.create(context, collectionTwo, false); + this.itemTwo = installItemService.installItem(context, workspaceItem); + // Add two bitstreams to item two + Bundle bundleTwo = bundleService.create(context, itemTwo, "ORIGINAL"); + bundleService.addBitstream(context, bundleTwo, bitstreamService.create(context, + new ByteArrayInputStream("Item 2 Bitstream 1".getBytes(StandardCharsets.UTF_8)))); + bundleService.addBitstream(context, bundleTwo, 
bitstreamService.create(context, + new ByteArrayInputStream("Item 2 Bitstream 2".getBytes(StandardCharsets.UTF_8)))); + // Item three + workspaceItem = workspaceItemService.create(context, collectionTwo, false); + this.itemThree = installItemService.installItem(context, workspaceItem); + // Add three bitstreams to item three + Bundle bundleThree = bundleService.create(context, itemThree, "ORIGINAL"); + bundleService.addBitstream(context, bundleThree, bitstreamService.create(context, + new ByteArrayInputStream("Item 3 Bitstream 1".getBytes(StandardCharsets.UTF_8)))); + bundleService.addBitstream(context, bundleThree, bitstreamService.create(context, + new ByteArrayInputStream("Item 3 Bitstream 2".getBytes(StandardCharsets.UTF_8)))); + bundleService.addBitstream(context, bundleThree, bitstreamService.create(context, + new ByteArrayInputStream("Item 3 Bitstream 2".getBytes(StandardCharsets.UTF_8)))); + + // Withdraw the second item for later testing + itemService.withdraw(context, itemTwo); + // Initialise metadata field for later testing with both items + this.metadataField = metadataFieldService.findByElement(context, + MetadataSchemaEnum.DC.getName(), element, qualifier); + context.restoreAuthSystemState(); + } catch (AuthorizeException | SQLException | IOException e) { + log.error("Error encountered during init", e); + fail("Error encountered during init: " + e.getMessage()); + } + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. 
+ * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + // Delete resources + try { + itemService.delete(context, itemOne); + itemService.delete(context, itemTwo); + itemService.delete(context, itemThree); + collectionService.delete(context, collectionOne); + collectionService.delete(context, collectionTwo); + communityService.delete(context, communityOne); + communityService.delete(context, communityTwo); + } catch (Exception e) { + // ignore + log.error("Error cleaning up test resources: " + e.getMessage()); + } + context.restoreAuthSystemState(); + + // Set all class members to null + communityOne = null; + communityTwo = null; + collectionOne = null; + collectionTwo = null; + itemOne = null; + itemTwo = null; + itemThree = null; + trueStatements = null; + trueFalseStatements = null; + falseStatements = null; + trueStatementOne = null; + falseStatementOne = null; + element = null; + qualifier = null; + metadataField = null; + + super.destroy(); + } + + /** + * Test the AND operator with simple lists of logical statements + */ + @Test + public void testAndOperator() { + // Blank operator + And and = new And(); + // Try tests + try { + // Set to True, True (expect True) + and.setStatements(trueStatements); + assertTrue("AND operator did not return true for a list of true statements", + and.getResult(context, itemOne)); + // Set to True, False (expect False) + and.setStatements(trueFalseStatements); + assertFalse("AND operator did not return false for a list of statements with at least one false", + and.getResult(context, itemOne)); + // Set to False, False (expect False) + and.setStatements(falseStatements); + assertFalse("AND operator did not return false for a list of false statements", + and.getResult(context, itemOne)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + 
fail("LogicalStatementException thrown testing the AND operator" + e.getMessage()); + } + } + + /** + * Test the OR operator with simple lists of logical statements + */ + @Test + public void testOrOperator() { + // Blank operator + Or or = new Or(); + // Try tests + try { + // Set to True, True (expect True) + or.setStatements(trueStatements); + assertTrue("OR operator did not return true for a list of true statements", + or.getResult(context, itemOne)); + // Set to True, False (expect True) + or.setStatements(trueFalseStatements); + assertTrue("OR operator did not return true for a list of statements with at least one false", + or.getResult(context, itemOne)); + // Set to False, False (expect False) + or.setStatements(falseStatements); + assertFalse("OR operator did not return false for a list of false statements", + or.getResult(context, itemOne)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the OR operator" + e.getMessage()); + } + } + + /** + * Test the NAND operator with simple lists of logical statements + */ + @Test + public void testNandOperator() { + // Blank operator + Nand nand = new Nand(); + // Try tests + try { + // Set to True, True (expect False) + nand.setStatements(trueStatements); + assertFalse("NAND operator did not return false for a list of true statements", + nand.getResult(context, itemOne)); + // Set to True, False (expect True) + nand.setStatements(trueFalseStatements); + assertTrue("NAND operator did not return true for a list of statements with at least one false", + nand.getResult(context, itemOne)); + // Set to False, False (expect True) + nand.setStatements(falseStatements); + assertTrue("NAND operator did not return true for a list of false statements", + nand.getResult(context, itemOne)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the NAND operator" + e.getMessage()); + } + } + + 
/** + * Test the NOR operator with simple lists of logical statements + */ + @Test + public void testNorOperator() { + // Blank operator + Nor nor = new Nor(); + // Try tests + try { + // Set to True, True (expect False) + nor.setStatements(trueStatements); + assertFalse("NOR operator did not return false for a list of true statements", + nor.getResult(context, itemOne)); + // Set to True, False (expect False) + nor.setStatements(trueFalseStatements); + assertFalse("NOR operator did not return false for a list of statements with a true and a false", + nor.getResult(context, itemOne)); + // Set to False, False (expect True) + nor.setStatements(falseStatements); + assertTrue("NOR operator did not return true for a list of false statements", + nor.getResult(context, itemOne)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the NOR operator" + e.getMessage()); + } + } + + /** + * Test the NOT operator with simple individual true/false statements + */ + @Test + public void testNotOperator() { + // Blank operator + Not not = new Not(); + // Try tests + try { + // Set to True (expect False) + not.setStatements(trueStatementOne); + assertFalse("NOT operator did not return false for a true statement", + not.getResult(context, itemOne)); + // Set to False (expect True) + not.setStatements(falseStatementOne); + assertTrue("NOT operator did not return true for a false statement", + not.getResult(context, itemOne)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the NOT operator" + e.getMessage()); + } + } + + /** + * Test a simple filter with a single logical statement: the MetadataValueMatchCondition + * looking for a dc.title field beginning with "TEST", and an item that doesn't match this test + */ + @Test + public void testMetadataValueMatchCondition() { + try { + MetadataValue metadataValueOne = 
metadataValueService.create(context, itemOne, metadataField); + MetadataValue metadataValueTwo = metadataValueService.create(context, itemTwo, metadataField); + metadataValueOne.setValue("TEST title should match the condition"); + metadataValueTwo.setValue("This title should not match the condition"); + } catch (SQLException e) { + fail("Encountered SQL error creating metadata value on item: " + e.getMessage()); + } + + // Instantiate new filter for testing this condition + DefaultFilter filter = new DefaultFilter(); + + // Create condition to match pattern on dc.title metadata + Condition condition = new MetadataValueMatchCondition(); + condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + Map parameters = new HashMap<>(); + // Match on the dc.title field + parameters.put("field", "dc.title"); + // "Starts with "TEST" (case sensitive) + parameters.put("pattern", "^TEST"); + // Set up condition with these parameters and add it as the sole statement to the metadata filter + try { + condition.setParameters(parameters); + filter.setStatement(condition); + // Test the filter on the first item - expected outcome is true + assertTrue("itemOne unexpectedly did not match the 'dc.title starts with TEST' test", + filter.getResult(context, itemOne)); + // Test the filter on the second item - expected outcome is false + assertFalse("itemTwo unexpectedly matched the 'dc.title starts with TEST' test", + filter.getResult(context, itemTwo)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the MetadataValueMatchCondition filter" + e.getMessage()); + } + } + + /** + * Test a simple filter with a single logical statement: the MetadataValuesMatchCondition + * looking for a dc.title field beginning with "TEST" or "ALSO", and an item that doesn't match this test + */ + @Test + public void testMetadataValuesMatchCondition() { + try { + MetadataValue metadataValueOne = 
metadataValueService.create(context, itemOne, metadataField); + MetadataValue metadataValueTwo = metadataValueService.create(context, itemTwo, metadataField); + MetadataValue metadataValueThree = metadataValueService.create(context, itemThree, metadataField); + metadataValueOne.setValue("TEST this title should match the condition"); + metadataValueTwo.setValue("This title should match the condition, yEs"); + metadataValueThree.setValue("This title should not match the condition"); + } catch (SQLException e) { + fail("Encountered SQL error creating metadata value on item: " + e.getMessage()); + } + + // Instantiate new filter for testing this condition + DefaultFilter filter = new DefaultFilter(); + + // Create condition to match pattern on dc.title metadata + Condition condition = new MetadataValuesMatchCondition(); + condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + Map parameters = new HashMap<>(); + // Match on the dc.title field + parameters.put("field", "dc.title"); + + List patterns = new ArrayList<>(); + // "Starts with "TEST" (case sensitive) + patterns.add("^TEST"); + // "Ends with 'yes' (case insensitive) + patterns.add("(?i)yes$"); + // Add the list of possible patterns + parameters.put("patterns", patterns); + + // Alternate parameters to test for a field where the item has no values + Map missingParameters = new HashMap<>(); + // Match on the dc.subject field - none of our test items have this field set + missingParameters.put("field", "dc.subject"); + // Add a pattern to the missing parameters + missingParameters.put("patterns", new ArrayList<>().add("TEST")); + + // Set up condition with these parameters and add it as the sole statement to the metadata filter + try { + condition.setParameters(parameters); + filter.setStatement(condition); + // Test the filter on the first item - expected outcome is true + assertTrue("itemOne unexpectedly did not match the " + + "'dc.title starts with TEST or ends with yes' test", 
filter.getResult(context, itemOne)); + // Test the filter on the second item - expected outcome is true + assertTrue("itemTwo unexpectedly did not match the " + + "'dc.title starts with TEST or ends with yes' test", filter.getResult(context, itemTwo)); + // Test the filter on the third item - expected outcome is false + assertFalse("itemThree unexpectedly matched the " + + "'dc.title starts with TEST or ends with yes' test", filter.getResult(context, itemThree)); + // Set condition and filter to use the missing field instead + condition.setParameters(missingParameters); + filter.setStatement(condition); + // Test this updated filter against the first item - expected outcome is false + assertFalse("itemOne unexpectedly matched the 'dc.subject contains TEST' test" + + "(it has no dc.subject metadata value)", filter.getResult(context, itemOne)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the MetadataValuesMatchCondition filter" + e.getMessage()); + } + } + + /** + * Test a simple filter with a single logical statement: the InCollectionCondition + * looking for an item that is in collectionOne, and one that is not in collectionOne + */ + @Test + public void testInCollectionCondition() { + // Instantiate new filter for testing this condition + DefaultFilter filter = new DefaultFilter(); + Condition condition = new InCollectionCondition(); + condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + Map parameters = new HashMap<>(); + + // Add collectionOne handle to the collections parameter - ie. 
we are testing to see if the item is + // in collectionOne only + List collections = new ArrayList<>(); + collections.add(collectionOne.getHandle()); + parameters.put("collections", collections); + + try { + // Set parameters and condition + condition.setParameters(parameters); + filter.setStatement(condition); + + // Test the filter on the first item - this item is in collectionOne: expected outcome is true + assertTrue("itemOne unexpectedly did not match the 'item in collectionOne' test", + filter.getResult(context, itemOne)); + // Test the filter on the second item - this item is NOT in collectionOne: expected outcome is false + assertFalse("itemTwo unexpectedly matched the 'item in collectionOne' test", + filter.getResult(context, itemTwo)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the InCollectionCondition filter" + e.getMessage()); + } + } + + /** + * Test a simple filter with a single logical statement: the InCommunityCondition + * looking for an item that is in communityOne, and one that is not in communityOne + */ + @Test + public void testInCommunityCondition() { + // Instantiate new filter for testing this condition + DefaultFilter filter = new DefaultFilter(); + Condition condition = new InCommunityCondition(); + condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + Map parameters = new HashMap<>(); + + // Add communitynOne handle to the communities parameter - ie. 
we are testing to see if the item is + // in communityOne only + List communities = new ArrayList<>(); + communities.add(communityOne.getHandle()); + parameters.put("communities", communities); + + try { + // Set parameters and condition + condition.setParameters(parameters); + filter.setStatement(condition); + + // Test the filter on the first item - this item is in communityOne: expected outcome is true + assertTrue("itemOne unexpectedly did not match the 'item in communityOne' test", + filter.getResult(context, itemOne)); + // Test the filter on the second item - this item is NOT in communityOne: expected outcome is false + assertFalse("itemTwo unexpectedly matched the 'item in communityOne' test", + filter.getResult(context, itemTwo)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the InCommunityCondition filter" + e.getMessage()); + } + } + + /** + * Test a simple filter with the IsWithdrawnCondition. During setup, itemTwo was withdrawn. 
+ */ + @Test + public void testIsWithdrawnCondition() { + // Instantiate new filter for testing this condition + DefaultFilter filter = new DefaultFilter(); + Condition condition = new IsWithdrawnCondition(); + + try { + condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + condition.setParameters(new HashMap<>()); + filter.setStatement(condition); + + // Test the filter on itemOne - this item is not withdrawn: expected outcome is false + assertFalse("itemOne unexpectedly matched the 'item is withdrawn' test", + filter.getResult(context, itemOne)); + // Test the filter on itemTwo - this item was withdrawn in setup: expected outcome is true + assertTrue("itemTwo unexpectedly did NOT match the 'item is withdrawn' test", + filter.getResult(context, itemTwo)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the IsWithdrawnCondition filter" + e.getMessage()); + } + } + + /** + * Test a simple filter with the BitstreamCountCondition. 
 */ +    @Test +    public void testBitstreamCountCondition() { +        // Instantiate new filter for testing this condition +        DefaultFilter filter = new DefaultFilter(); +        Condition condition = new BitstreamCountCondition(); + +        try { +            condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + +            // Set parameters to check for items with at least 1 and at most 2 bitstreams in the ORIGINAL bundle +            Map parameters = new HashMap<>(); +            parameters.put("bundle", "ORIGINAL"); +            parameters.put("min", String.valueOf(1)); +            parameters.put("max", String.valueOf(2)); +            condition.setParameters(parameters); +            filter.setStatement(condition); + +            // Test the filter on itemOne - this item has one THUMBNAIL but zero ORIGINAL bitstreams: expect false +            assertFalse("itemOne unexpectedly matched the '>=1 and <=2 ORIGINAL bitstreams' test" + +                " (it has zero ORIGINAL bitstreams)", filter.getResult(context, itemOne)); +            // Test the filter on itemTwo - this item has two ORIGINAL bitstreams: expect true +            assertTrue("itemTwo unexpectedly did NOT match the '>=1 and <=2 ORIGINAL bitstreams' test" + +                " (it has 2 ORIGINAL bitstreams)", filter.getResult(context, itemTwo)); +            // Test the filter on itemThree - this item has three ORIGINAL bitstreams: expect false +            assertFalse("itemThree unexpectedly matched the '>=1 and <=2 ORIGINAL bitstreams' test" + +                " (it has 3 ORIGINAL bitstreams)", filter.getResult(context, itemThree)); +        } catch (LogicalStatementException e) { +            log.error(e.getMessage()); +            fail("LogicalStatementException thrown testing the BitstreamCountCondition filter: " + e.getMessage()); +        } +    } + +    /** +     * Test a simple filter using the ReadableByGroupCondition +     */ +    @Test +    public void testReadableByGroupCondition() { +        // Instantiate new filter for testing this condition +        DefaultFilter filter = new DefaultFilter(); +        Condition condition = new ReadableByGroupCondition(); + +        try { +            condition.setItemService(ContentServiceFactory.getInstance().getItemService()); + 
+ // Make item one readable by Test Group + try { + context.turnOffAuthorisationSystem(); + Group g = groupService.create(context); + groupService.setName(g, "Test Group"); + groupService.update(context, g); + authorizeService.addPolicy(context, itemOne, Constants.READ, g); + context.restoreAuthSystemState(); + } catch (AuthorizeException | SQLException e) { + fail("Exception thrown adding group READ policy to item: " + itemOne + ": " + e.getMessage()); + } + // Set parameters to check for items with Anonymous READ permission + Map parameters = new HashMap<>(); + parameters.put("group", "Test Group"); + parameters.put("action", "READ"); + condition.setParameters(parameters); + filter.setStatement(condition); + + // Test the filter on itemOne - this item was explicitly set with expected group READ policy + assertTrue("itemOne unexpectedly did not match the 'is readable by Test Group' test", + filter.getResult(context, itemOne)); + // Test the filter on itemTwo - this item has no policies: expect false + assertFalse("itemTwo unexpectedly matched the 'is readable by Test Group' test", + filter.getResult(context, itemTwo)); + } catch (LogicalStatementException e) { + log.error(e.getMessage()); + fail("LogicalStatementException thrown testing the ReadableByGroup filter" + e.getMessage()); + } + } + + /** + * Set up some simple statements for testing out operators + */ + private void setUpStatements() { + // Simple lambdas to define statements + // The two class members are used elsewhere, as direct statements for NOT testing + trueStatementOne = (context, item) -> true; + LogicalStatement trueStatementTwo = (context, item) -> true; + falseStatementOne = (context, item) -> false; + LogicalStatement falseStatementTwo = (context, item) -> false; + + // Create lists and add the statements + // True, True + trueStatements = new ArrayList<>(); + trueStatements.add(trueStatementOne); + trueStatements.add(trueStatementTwo); + // True, False + trueFalseStatements = new 
ArrayList<>(); + trueFalseStatements.add(trueStatementOne); + trueFalseStatements.add(falseStatementOne); + // False, False + falseStatements = new ArrayList<>(); + falseStatements.add(falseStatementOne); + falseStatements.add(falseStatementTwo); + } +} diff --git a/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java b/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java index 33e353f45768..a634b98130a6 100644 --- a/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java +++ b/dspace-api/src/test/java/org/dspace/content/packager/ITDSpaceAIP.java @@ -194,7 +194,7 @@ public static void setUpClass() { ePersonService.update(context, submitter); context.setCurrentUser(submitter); - //Make our test ePerson an admin so he can perform deletes and restores + //Make our test ePerson an admin so it can perform deletes and restores GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); Group adminGroup = groupService.findByName(context, Group.ADMIN); groupService.addMember(context, adminGroup, submitter); diff --git a/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java new file mode 100644 index 000000000000..25eb0361592e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/content/service/ItemServiceIT.java @@ -0,0 +1,919 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.service; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; +import 
java.io.InputStream; +import java.sql.SQLException; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.requestitem.RequestItem; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.builder.RelationshipTypeBuilder; +import org.dspace.builder.RequestItemBuilder; +import org.dspace.builder.ResourcePolicyBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.Relationship; +import org.dspace.content.RelationshipType; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.versioning.Version; +import org.dspace.versioning.factory.VersionServiceFactory; +import org.dspace.versioning.service.VersioningService; +import org.junit.Before; +import org.junit.Test; + +public class ItemServiceIT extends AbstractIntegrationTestWithDatabase { + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceIT.class); + + protected 
RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService(); + protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance() + .getRelationshipTypeService(); + protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService(); + protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService(); + protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService(); + protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService(); + protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + + Community community; + Collection collection1; + + Item item; + + String authorQualifier = "author"; + String contributorElement = "contributor"; + String dcSchema = "dc"; + String subjectElement = "subject"; + String descriptionElement = "description"; + String abstractQualifier = "abstract"; + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. 
+ */ + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + try { + context.turnOffAuthorisationSystem(); + + community = CommunityBuilder.createCommunity(context) + .build(); + + collection1 = CollectionBuilder.createCollection(context, community) + .withEntityType("Publication") + .build(); + + WorkspaceItem is = workspaceItemService.create(context, collection1, false); + + item = installItemService.installItem(context, is); + + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + } + + @Test + public void preserveMetadataOrder() throws Exception { + context.turnOffAuthorisationSystem(); + itemService + .addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, one", null, 0, 2 + ); + MetadataValue placeZero = + itemService + .addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, two", null, 0, 0 + ); + itemService + .addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, three", null, 0, 1 + ); + + context.commit(); + context.restoreAuthSystemState(); + + // check the correct order using default method `getMetadata` + List defaultMetadata = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + + assertThat(defaultMetadata,hasSize(3)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, two", null, 0, defaultMetadata.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, defaultMetadata.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, defaultMetadata.get(2) + ); + + // check the correct order using the method 
`getMetadata` without virtual fields + List nonVirtualMetadatas = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY, false); + + // if we don't reload the item the place order is not applied correctly + // item = context.reloadEntity(item); + + assertThat(nonVirtualMetadatas,hasSize(3)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, two", null, 0, nonVirtualMetadatas.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, nonVirtualMetadatas.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, nonVirtualMetadatas.get(2) + ); + + context.turnOffAuthorisationSystem(); + + item = context.reloadEntity(item); + + // now just add one metadata to be the last + this.itemService.addMetadata( + context, item, dcSchema, contributorElement, authorQualifier, Item.ANY, "test, latest", null, 0 + ); + // now just remove first metadata + this.itemService.removeMetadataValues(context, item, List.of(placeZero)); + // now just add one metadata to place 0 + this.itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, Item.ANY, "test, new", null, 0, 0 + ); + + // check the metadata using method `getMetadata` + defaultMetadata = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + + // check correct places + assertThat(defaultMetadata,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, defaultMetadata.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, defaultMetadata.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, defaultMetadata.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, 
defaultMetadata.get(3) + ); + + // check metadata using nonVirtualMethod + nonVirtualMetadatas = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY, false); + + // check correct places + assertThat(nonVirtualMetadatas,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, nonVirtualMetadatas.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, nonVirtualMetadatas.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, nonVirtualMetadatas.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, nonVirtualMetadatas.get(3) + ); + + // check both lists + assertThat(defaultMetadata.size(), equalTo(nonVirtualMetadatas.size())); + assertThat(defaultMetadata.get(0), equalTo(nonVirtualMetadatas.get(0))); + assertThat(defaultMetadata.get(1), equalTo(nonVirtualMetadatas.get(1))); + assertThat(defaultMetadata.get(2), equalTo(nonVirtualMetadatas.get(2))); + assertThat(defaultMetadata.get(3), equalTo(nonVirtualMetadatas.get(3))); + + context.commit(); + context.restoreAuthSystemState(); + + item = context.reloadEntity(item); + + // check after commit + defaultMetadata = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + + // check correct places + assertThat(defaultMetadata,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, defaultMetadata.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, defaultMetadata.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, defaultMetadata.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, defaultMetadata.get(3) + ); + + // check 
metadata using nonVirtualMethod + nonVirtualMetadatas = + this.itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY, false); + + // check correct places + assertThat(nonVirtualMetadatas,hasSize(4)); + + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, new", null, 0, nonVirtualMetadatas.get(0) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, three", null, 1, nonVirtualMetadatas.get(1) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, one", null, 2, nonVirtualMetadatas.get(2) + ); + assertMetadataValue( + authorQualifier, contributorElement, dcSchema, "test, latest", null, 3, nonVirtualMetadatas.get(3) + ); + + // check both lists + assertThat(defaultMetadata.size(), equalTo(nonVirtualMetadatas.size())); + assertThat(defaultMetadata.get(0), equalTo(nonVirtualMetadatas.get(0))); + assertThat(defaultMetadata.get(1), equalTo(nonVirtualMetadatas.get(1))); + assertThat(defaultMetadata.get(2), equalTo(nonVirtualMetadatas.get(2))); + assertThat(defaultMetadata.get(3), equalTo(nonVirtualMetadatas.get(3))); + + } + + @Test + public void InsertAndMoveMetadataShiftPlaceTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, 
one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list.get(2)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + + // And move metadata from place=2 to place=0 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 2, 0); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 1, list.get(1)); + 
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + } + + @Test + public void InsertAndMoveMetadataShiftPlaceTest_complex() throws Exception { + context.turnOffAuthorisationSystem(); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub1"); + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub2"); + + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + // NOTE: dc.description.abstract should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, descriptionElement, abstractQualifier, null, "test, abs1"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list1 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list1.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list1.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list1.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list1.get(2)); + + List list2 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list2.size(), equalTo(2)); + + assertMetadataValue(null, 
subjectElement, dcSchema, "test, sub1", null, 0, list2.get(0)); +        assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list2.get(1)); + +        List list3 = itemService +            .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); +        assertThat(list3.size(), equalTo(1)); + +        assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list3.get(0)); + +        context.turnOffAuthorisationSystem(); + +        // This is where we add metadata at place=1 +        itemService.addAndShiftRightMetadata( +            context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 +        ); + +        // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned +        List list4 = itemService +            .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) +            .stream() +            .sorted(Comparator.comparingInt(MetadataValue::getPlace)) +            .collect(Collectors.toList()); +        assertThat(list4.size(), equalTo(4)); +        assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list4.get(0)); +        assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list4.get(1)); +        assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list4.get(2)); +        assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list4.get(3)); + +        List list5 = itemService +            .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); +        assertThat(list5.size(), equalTo(2)); + +        assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list5.get(0)); +        assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list5.get(1)); + +        List list6 = itemService +            .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); +        assertThat(list6.size(), equalTo(1)); + +        assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, 
list6.get(0)); + + // And move metadata from place=2 to place=0 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 2, 0); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list7 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list7.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 0, list7.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 1, list7.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list7.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list7.get(3)); + + List list8 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list8.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list8.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list8.get(1)); + + List list9 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list9.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list9.get(0)); + } + + @Test + public void InsertAndMoveMetadataOnePlaceForwardTest() throws Exception { + context.turnOffAuthorisationSystem(); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + 
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list.get(2)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + + // And move metadata from place=1 to place=2 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 1, 2); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + list 
= itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3)); + } + + @Test + public void InsertAndMoveMetadataOnePlaceForwardTest_complex() throws Exception { + context.turnOffAuthorisationSystem(); + + // NOTE: dc.description.abstract should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, descriptionElement, abstractQualifier, null, "test, abs1"); + + // Here we add the first set of metadata to the item + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one"); + + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub1"); + + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two"); + itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three"); + + // NOTE: dc.subject should NOT affect dc.contributor.author + itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub2"); + + context.restoreAuthSystemState(); + + // The code below performs the mentioned assertions to ensure the place is correct + List list1 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY); + assertThat(list1.size(), equalTo(3)); + + assertMetadataValue(authorQualifier, 
contributorElement, dcSchema, "test, one", null, 0, list1.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list1.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list1.get(2)); + + List list2 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list2.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list2.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list2.get(1)); + + List list3 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list3.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list3.get(0)); + + context.turnOffAuthorisationSystem(); + + // This is where we add metadata at place=1 + itemService.addAndShiftRightMetadata( + context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1 + ); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list4 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list4.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list4.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list4.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list4.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list4.get(3)); + + List list5 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list5.size(), 
equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list5.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list5.get(1)); + + List list6 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list6.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list6.get(0)); + + // And move metadata from place=1 to place=2 + itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 1, 2); + + context.restoreAuthSystemState(); + + // Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned + List list7 = itemService + .getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY) + .stream() + .sorted(Comparator.comparingInt(MetadataValue::getPlace)) + .collect(Collectors.toList()); + assertThat(list7.size(), equalTo(4)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list7.get(0)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list7.get(1)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list7.get(2)); + assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list7.get(3)); + + List list8 = itemService + .getMetadata(item, dcSchema, subjectElement, null, Item.ANY); + assertThat(list8.size(), equalTo(2)); + + assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list8.get(0)); + assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list8.get(1)); + + List list9 = itemService + .getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY); + assertThat(list9.size(), equalTo(1)); + + assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, 
list9.get(0)); + } + + @Test + public void testDeleteItemWithMultipleVersions() throws Exception { + context.turnOffAuthorisationSystem(); + + EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication") + .build(); + + EntityType personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person") + .build(); + + RelationshipType isAuthorOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder( + context, publicationEntityType, personEntityType, "isAuthorOfPublication", "isPublicationOfAuthor", + null, null, null, null + ) + .withCopyToLeft(false) + .withCopyToRight(false) + .build(); + + Collection collection2 = CollectionBuilder.createCollection(context, community) + .withEntityType("Person") + .build(); + + Item publication1 = ItemBuilder.createItem(context, collection1) + .withTitle("publication 1") + // NOTE: entity type comes from collection + .build(); + + Item person1 = ItemBuilder.createItem(context, collection2) + .withTitle("person 2") + // NOTE: entity type comes from collection + .build(); + + RelationshipBuilder.createRelationshipBuilder(context, publication1, person1, isAuthorOfPublication); + + // create a new version, which results in a non-latest relationship attached person 1. 
+ Version newVersion = versioningService.createNewVersion(context, publication1); + Item newPublication1 = newVersion.getItem(); + WorkspaceItem newPublication1WSI = workspaceItemService.findByItem(context, newPublication1); + installItemService.installItem(context, newPublication1WSI); + context.dispatchEvents(); + + // verify person1 has a non-latest relationship, which should also be removed + List relationships1 = relationshipService.findByItem(context, person1, -1, -1, false, true); + assertEquals(1, relationships1.size()); + List relationships2 = relationshipService.findByItem(context, person1, -1, -1, false, false); + assertEquals(2, relationships2.size()); + + itemService.delete(context, person1); + + context.restoreAuthSystemState(); + } + + @Test + public void testFindItemsWithEditNoRights() throws Exception { + context.setCurrentUser(eperson); + List result = itemService.findItemsWithEdit(context, 0, 10); + int count = itemService.countItemsWithEdit(context); + assertThat(result.size(), equalTo(0)); + assertThat(count, equalTo(0)); + } + + @Test + public void testFindAndCountItemsWithEditEPerson() throws Exception { + ResourcePolicy rp = ResourcePolicyBuilder.createResourcePolicy(context) + .withUser(eperson) + .withDspaceObject(item) + .withAction(Constants.WRITE) + .build(); + context.setCurrentUser(eperson); + List result = itemService.findItemsWithEdit(context, 0, 10); + int count = itemService.countItemsWithEdit(context); + assertThat(result.size(), equalTo(1)); + assertThat(count, equalTo(1)); + } + + @Test + public void testFindAndCountItemsWithAdminEPerson() throws Exception { + ResourcePolicy rp = ResourcePolicyBuilder.createResourcePolicy(context) + .withUser(eperson) + .withDspaceObject(item) + .withAction(Constants.ADMIN) + .build(); + context.setCurrentUser(eperson); + List result = itemService.findItemsWithEdit(context, 0, 10); + int count = itemService.countItemsWithEdit(context); + assertThat(result.size(), equalTo(1)); + 
assertThat(count, equalTo(1)); + } + + @Test + public void testFindAndCountItemsWithEditGroup() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .addMember(eperson) + .build(); + context.restoreAuthSystemState(); + + ResourcePolicy rp = ResourcePolicyBuilder.createResourcePolicy(context) + .withGroup(group) + .withDspaceObject(item) + .withAction(Constants.WRITE) + .build(); + context.setCurrentUser(eperson); + List result = itemService.findItemsWithEdit(context, 0, 10); + int count = itemService.countItemsWithEdit(context); + assertThat(result.size(), equalTo(1)); + assertThat(count, equalTo(1)); + } + + @Test + public void testFindAndCountItemsWithAdminGroup() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .addMember(eperson) + .build(); + context.restoreAuthSystemState(); + + ResourcePolicy rp = ResourcePolicyBuilder.createResourcePolicy(context) + .withGroup(group) + .withDspaceObject(item) + .withAction(Constants.ADMIN) + .build(); + context.setCurrentUser(eperson); + List result = itemService.findItemsWithEdit(context, 0, 10); + int count = itemService.countItemsWithEdit(context); + assertThat(result.size(), equalTo(1)); + assertThat(count, equalTo(1)); + } + + @Test + public void testRemoveItemThatHasRequests() throws Exception { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection1) + .withTitle("Test") + .build(); + InputStream is = new ByteArrayInputStream(new byte[0]); + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, is) + .build(); + RequestItem requestItem = RequestItemBuilder.createRequestItem(context, item, bitstream) + .build(); + + itemService.delete(context, item); + context.dispatchEvents(); + context.restoreAuthSystemState(); + + assertNull(itemService.find(context, item.getID())); + } + + @Test + public void 
testMoveItemToCollectionWithMoreRestrictiveItemReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default item READ policy + * to a collection with a restrictive default item READ policy, + * that the item and its bundles do not retain the original permissive item READ policy. + * However, its bitstreams do. + */ + + context.turnOffAuthorisationSystem(); + + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + Group admin = groupService.findByName(context, Group.ADMIN); + + // Set up the two different collections: one permissive and one restrictive in its default READ policy. + Collection permissive = CollectionBuilder + .createCollection(context, community) + .build(); + Collection restrictive = CollectionBuilder + .createCollection(context, community) + .build(); + authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_ITEM_READ); + authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_ITEM_READ, admin); + + // Add an item to the permissive collection. + Item item = ItemBuilder + .createItem(context, permissive) + .build(); + + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream()) + .build(); + + Bundle bundle = item.getBundles("ORIGINAL").get(0); + + // Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group. 
+ assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + // Move the item to the restrictive collection, making sure to inherit default policies. + itemService.move(context, item, permissive, restrictive, true); + + // Verify that the item's read policy now only allows administrators. + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + context.restoreAuthSystemState(); + } + + @Test + public void testMoveItemToCollectionWithMoreRestrictiveBitstreamReadPolicy() throws Exception { + /* Verify that, if we move an item from a collection with a permissive default bitstream READ policy + * to a collection with a restrictive default bitstream READ policy, + * that the item's bitstreams do not retain the original permissive READ policy. + * However, the item itself and its bundles do retain the original policy. 
+ */ + + context.turnOffAuthorisationSystem(); + + Group anonymous = groupService.findByName(context, Group.ANONYMOUS); + Group admin = groupService.findByName(context, Group.ADMIN); + + // Set up the two different collections: one permissive and one restrictive in its default READ policy. + Collection permissive = CollectionBuilder + .createCollection(context, community) + .build(); + Collection restrictive = CollectionBuilder + .createCollection(context, community) + .build(); + authorizeService.removePoliciesActionFilter(context, restrictive, Constants.DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, restrictive, Constants.DEFAULT_BITSTREAM_READ, admin); + + // Add an item to the permissive collection. + Item item = ItemBuilder + .createItem(context, permissive) + .build(); + + Bitstream bitstream = BitstreamBuilder.createBitstream(context, item, InputStream.nullInputStream()) + .build(); + + Bundle bundle = item.getBundles("ORIGINAL").get(0); + + // Verify that the item, bundle and bitstream each have exactly one READ policy, for the anonymous group. + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + // Move the item to the restrictive collection, making sure to inherit default policies. + itemService.move(context, item, permissive, restrictive, true); + + // Verify that the bundle and bitstream's read policies now only allows administrators. 
+ assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, item, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(anonymous), + authorizeService.getPoliciesActionFilter(context, bundle, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + assertEquals( + List.of(admin), + authorizeService.getPoliciesActionFilter(context, bitstream, Constants.READ) + .stream().map(ResourcePolicy::getGroup).collect(Collectors.toList()) + ); + + context.restoreAuthSystemState(); + + } + + private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value, + String authority, int place, MetadataValue metadataValue) { + assertThat(metadataValue.getValue(), equalTo(value)); + assertThat(metadataValue.getMetadataField().getMetadataSchema().getName(), equalTo(dcSchema)); + assertThat(metadataValue.getMetadataField().getElement(), equalTo(contributorElement)); + assertThat(metadataValue.getMetadataField().getQualifier(), equalTo(authorQualifier)); + assertThat(metadataValue.getAuthority(), equalTo(authority)); + assertThat(metadataValue.getPlace(), equalTo(place)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/core/ContextIT.java b/dspace-api/src/test/java/org/dspace/core/ContextIT.java new file mode 100644 index 000000000000..6cf8336171f2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/core/ContextIT.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.core; + +import static org.junit.Assert.assertEquals; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.ResourcePolicy; +import 
org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.builder.CommunityBuilder; +import org.junit.Test; + +public class ContextIT extends AbstractIntegrationTestWithDatabase { + + AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + @Test + public void testGetPoliciesNewCommunityAfterReadOnlyModeChange() throws Exception { + + context.turnOffAuthorisationSystem(); + + // First disable the index consumer. The indexing process calls the authorizeService + // function used in this test and may affect the test + context.setDispatcher("noindex"); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + context.restoreAuthSystemState(); + + context.setMode(Context.Mode.READ_ONLY); + + List policies = authorizeService.getPoliciesActionFilter(context, parentCommunity, + Constants.READ); + + assertEquals("Should return the default anonymous group read policy", 1, policies.size()); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/core/ContextTest.java b/dspace-api/src/test/java/org/dspace/core/ContextTest.java index 811582c569a1..c6cd849d2110 100644 --- a/dspace-api/src/test/java/org/dspace/core/ContextTest.java +++ b/dspace-api/src/test/java/org/dspace/core/ContextTest.java @@ -8,6 +8,7 @@ package org.dspace.core; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; @@ -511,9 +512,8 @@ public void testGetSpecialGroups() throws SQLException, AuthorizeException, IOEx // Now get our special groups List specialGroups = instance.getSpecialGroups(); - assertThat("testGetSpecialGroup 0", specialGroups.size(), equalTo(2)); - assertThat("testGetSpecialGroup 1", specialGroups.get(0), equalTo(group)); - 
assertThat("testGetSpecialGroup 1", specialGroups.get(1), equalTo(adminGroup)); + assertThat("testGetSpecialGroup size", specialGroups.size(), equalTo(2)); + assertThat("testGetSpecialGroup content", specialGroups, hasItems(group, adminGroup)); // Cleanup our context & group groupService.delete(instance, group); diff --git a/dspace-api/src/test/java/org/dspace/core/UtilsTest.java b/dspace-api/src/test/java/org/dspace/core/UtilsTest.java index 920fa69d6d31..291561ac2536 100644 --- a/dspace-api/src/test/java/org/dspace/core/UtilsTest.java +++ b/dspace-api/src/test/java/org/dspace/core/UtilsTest.java @@ -75,6 +75,12 @@ public void testGetHostName() { assertEquals("Test keep other prefixes", "demo.dspace.org", Utils.getHostName("https://demo.dspace.org")); + assertEquals("Test with parameter", "demo.dspace.org", + Utils.getHostName("https://demo.dspace.org/search?query=test")); + + assertEquals("Test with parameter with space", "demo.dspace.org", + Utils.getHostName("https://demo.dspace.org/search?query=test turbine")); + // This uses a bunch of reserved URI characters assertNull("Test invalid URI returns null", Utils.getHostName("&+,?/@=")); } diff --git a/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java new file mode 100644 index 000000000000..2a07799deee5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/ctask/general/CreateMissingIdentifiersIT.java @@ -0,0 +1,89 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.ctask.general; + +import static org.junit.Assert.assertEquals; + +import java.io.IOException; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; 
+import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.factory.CoreServiceFactory; +import org.dspace.curate.Curator; +import org.dspace.identifier.VersionedHandleIdentifierProviderWithCanonicalHandles; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Test; + +/** + * Rudimentary test of the curation task. + * + * @author mwood + */ +public class CreateMissingIdentifiersIT + extends AbstractIntegrationTestWithDatabase { + private static final String P_TASK_DEF + = "plugin.named.org.dspace.curate.CurationTask"; + private static final String TASK_NAME = "test"; + + @Test + public void testPerform() + throws IOException { + // Must remove any cached named plugins before creating a new one + CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses(); + ConfigurationService configurationService = kernelImpl.getConfigurationService(); + // Define a new task dynamically + configurationService.setProperty(P_TASK_DEF, + CreateMissingIdentifiers.class.getCanonicalName() + " = " + TASK_NAME); + + Curator curator = new Curator(); + curator.addTask(TASK_NAME); + + context.setCurrentUser(admin); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .build(); + Item item = ItemBuilder.createItem(context, collection) + .build(); + + /* + * Curate with regular test configuration -- should succeed. + */ + curator.curate(context, item); + int status = curator.getStatus(TASK_NAME); + assertEquals("Curation should succeed", Curator.CURATE_SUCCESS, status); + + /* + * Now install an incompatible provider to make the task fail. 
+ */ + DSpaceServicesFactory.getInstance() + .getServiceManager() + .registerServiceClass( + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getCanonicalName(), + VersionedHandleIdentifierProviderWithCanonicalHandles.class); + + curator.curate(context, item); + System.out.format("With incompatible provider, result is '%s'.\n", + curator.getResult(TASK_NAME)); + assertEquals("Curation should fail", Curator.CURATE_ERROR, + curator.getStatus(TASK_NAME)); + } + + @Override + @After + public void destroy() throws Exception { + super.destroy(); + DSpaceServicesFactory.getInstance().getServiceManager().getApplicationContext().refresh(); + } +} diff --git a/dspace-api/src/test/java/org/dspace/curate/CurationIT.java b/dspace-api/src/test/java/org/dspace/curate/CurationIT.java index 6232793c7408..31bfe2550a4a 100644 --- a/dspace-api/src/test/java/org/dspace/curate/CurationIT.java +++ b/dspace-api/src/test/java/org/dspace/curate/CurationIT.java @@ -43,8 +43,9 @@ public void curationWithoutEPersonParameterTest() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } @@ -69,8 +70,9 @@ public void curationWithEPersonParameterTest() throws Exception { script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration); } if (script != null) { - script.initialize(args, testDSpaceRunnableHandler, null); - script.run(); + if (DSpaceRunnable.StepResult.Continue.equals(script.initialize(args, testDSpaceRunnableHandler, null))) { + script.run(); + } } } } diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 9518f5907191..55be531418ae 100644 --- 
a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -7,18 +7,28 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedList; import java.util.List; +import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -34,6 +44,8 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableItem; @@ -55,6 +67,7 @@ import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService; +import org.junit.Before; import org.junit.Test; import 
org.springframework.mock.web.MockHttpServletRequest; @@ -64,7 +77,7 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - protected SearchService searchService = SearchUtils.getSearchService(); + protected SearchService searchService; XmlWorkflowService workflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); @@ -86,6 +99,14 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory.getInstance() .getMetadataAuthorityService(); + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + configurationService.setProperty("solr-database-resync.time-until-reindex", 1); + searchService = SearchUtils.getSearchService(); + } + @Test public void solrRecordsAfterDepositOrDeletionOfWorkspaceItemTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -371,7 +392,8 @@ public void verifySolrRecordsOfDeletedObjectsTest() throws Exception { collectionService.delete(context, col1); context.restoreAuthSystemState(); assertSearchQuery(IndexableCollection.TYPE, 2); - assertSearchQuery(IndexableItem.TYPE, 2); + // Deleted item contained within totalFound due to predb status (SolrDatabaseResyncCli takes care of this) + assertSearchQuery(IndexableItem.TYPE, 2, 3, 0, -1); } @Test @@ -453,6 +475,10 @@ public void verifySolrRecordsOfDeletedObjectsPaginationTest() throws Exception { assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1); // check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6 assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2); + + // Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items + performSolrDatabaseResyncScript(); + // check Item type with start=2 and limit=4, we expect: indexableObjects=1, totalFound=3 
assertSearchQuery(IndexableItem.TYPE, 1, 3, 2, 4); // check Item type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3 @@ -639,16 +665,143 @@ public void disabledRerunOfSolrQueryDueToStaleObjectsTest() throws Exception { // check Item type with start=0 and limit=default, // we expect: indexableObjects=3, totalFound=6 (3 stale objects here) assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1); - // as the previous query hit the stale objects running a new query should lead to a clean situation + + // Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items + performSolrDatabaseResyncScript(); + + // as SolrDatabaseResyncCli removed the stale objects, running a new query should lead to a clean situation assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1); } + @Test + public void iteratorSearchServiceTest() throws SearchServiceException { + String subject1 = "subject1"; + String subject2 = "subject2"; + int numberItemsSubject1 = 30; + int numberItemsSubject2 = 2; + Item[] itemsSubject1 = new Item[numberItemsSubject1]; + Item[] itemsSubject2 = new Item[numberItemsSubject2]; + context.turnOffAuthorisationSystem(); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItemsSubject1; i++) { + itemsSubject1[i] = ItemBuilder.createItem(context, collection) + .withTitle("item subject 1 number" + i) + .withSubject(subject1) + .build(); + } + + for (int i = 0; i < numberItemsSubject2; i++) { + itemsSubject2[i] = ItemBuilder.createItem(context, collection) + .withTitle("item subject 2 number " + i) + .withSubject(subject2) + .build(); + } + + Collection collection2 = CollectionBuilder.createCollection(context, community).build(); + ItemBuilder.createItem(context, collection2) + .withTitle("item collection2") + .withSubject(subject1) + .build(); + context.restoreAuthSystemState(); + + + 
DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.addFilterQueries("subject:" + subject1); + + Iterator itemIterator = + searchService.iteratorSearch(context, new IndexableCollection(collection), discoverQuery); + int counter = 0; + List foundItems = new ArrayList<>(); + while (itemIterator.hasNext()) { + foundItems.add(itemIterator.next()); + counter++; + } + for (Item item : itemsSubject1) { + assertTrue(foundItems.contains(item)); + } + assertEquals(numberItemsSubject1, counter); + + discoverQuery = new DiscoverQuery(); + discoverQuery.addFilterQueries("subject:" + subject2); + + itemIterator = searchService.iteratorSearch(context, null, discoverQuery); + counter = 0; + foundItems = new ArrayList<>(); + while (itemIterator.hasNext()) { + foundItems.add(itemIterator.next()); + counter++; + } + assertEquals(numberItemsSubject2, counter); + for (Item item : itemsSubject2) { + assertTrue(foundItems.contains(item)); + } + } + + /** + * Test designed to check if default sort option for Discovery is working, using workspace + * DiscoveryConfiguration
    + * Note: this test will be skipped if workspace do not have a default sort option set and of + * metadataType dc_date_accessioned or lastModified + * @throws SearchServiceException + */ + @Test + public void searchWithDefaultSortServiceTest() throws SearchServiceException { + DiscoveryConfiguration workspaceConf = + SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null); + // Skip if no default sort option set for workspaceConf + if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { + return; + } + + DiscoverySortFieldConfiguration defaultSortField = + workspaceConf.getSearchSortConfiguration().getDefaultSortField(); + + // Populate the testing objects: create items in eperson's workspace and perform search in it + int numberItems = 10; + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItems; i++) { + ItemBuilder.createItem(context, collection) + .withTitle("item " + i) + .build(); + } + context.restoreAuthSystemState(); + + // Build query with default parameters (except for workspaceConf) + DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() + .buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null, + null,null); + + DiscoverResult result = searchService.search(context, discoverQuery); + + /* + // code example for testing against sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") + 
) + .collect(Collectors.toCollection(LinkedList::new)); + }*/ + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } private void assertSearchQuery(String resourceType, int size, int totalFound, int start, int limit) - throws SearchServiceException { + throws SearchServiceException { DiscoverQuery discoverQuery = new DiscoverQuery(); discoverQuery.setQuery("*:*"); discoverQuery.setStart(start); @@ -665,7 +818,7 @@ private void deposit(WorkspaceItem workspaceItem) throws SQLException, AuthorizeException, IOException, WorkflowException, SearchServiceException { context.turnOffAuthorisationSystem(); workspaceItem = context.reloadEntity(workspaceItem); - XmlWorkflowItem workflowItem = workflowService.startWithoutNotify(context, workspaceItem); + XmlWorkflowItem unusedWorkflowItem = workflowService.startWithoutNotify(context, workspaceItem); context.commit(); indexer.commit(); context.restoreAuthSystemState(); @@ -739,6 +892,13 @@ private void executeWorkflowAction(HttpServletRequest httpServletRequest, Workfl context.setCurrentUser(previousUser); } + public void performSolrDatabaseResyncScript() throws Exception { + String[] args = new String[] {"solr-database-resync"}; + TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); + ScriptLauncher + .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); + } + private void abort(XmlWorkflowItem workflowItem) throws SQLException, AuthorizeException, IOException, SearchServiceException { final 
EPerson previousUser = context.getCurrentUser(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/DiscoverQueryBuilderTest.java b/dspace-api/src/test/java/org/dspace/discovery/utils/DiscoverQueryBuilderTest.java similarity index 79% rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/DiscoverQueryBuilderTest.java rename to dspace-api/src/test/java/org/dspace/discovery/utils/DiscoverQueryBuilderTest.java index 9a8f07e76a35..07652e8c0c4e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/DiscoverQueryBuilderTest.java +++ b/dspace-api/src/test/java/org/dspace/discovery/utils/DiscoverQueryBuilderTest.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.utils; +package org.dspace.discovery.utils; import static java.util.Collections.emptyList; import static org.dspace.discovery.configuration.DiscoveryConfigurationParameters.SORT.COUNT; @@ -16,10 +16,10 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isEmptyOrNullString; import static org.junit.Assert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; @@ -35,9 +35,6 @@ import java.util.List; import java.util.function.Function; -import org.dspace.app.rest.exception.DSpaceBadRequestException; -import org.dspace.app.rest.exception.InvalidSearchRequestException; -import org.dspace.app.rest.parameter.SearchFilter; import org.dspace.core.Context; import org.dspace.discovery.DiscoverFacetField; import org.dspace.discovery.DiscoverFilterQuery; @@ -45,6 +42,7 @@ import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.FacetYearRange; 
import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SolrServiceImpl; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; @@ -56,6 +54,7 @@ import org.dspace.discovery.configuration.HierarchicalSidebarFacetConfiguration; import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.factory.IndexFactory; +import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter; import org.dspace.services.ConfigurationService; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -65,8 +64,7 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Sort; + /** * Unit tests for {@link DiscoverQueryBuilder} @@ -94,8 +92,14 @@ public class DiscoverQueryBuilderTest { private DiscoveryConfiguration discoveryConfiguration; private String query; - private SearchFilter searchFilter; - private PageRequest page; + + private int pageSize = 10; + private long offset = 10; + private String sortProperty = "dc.title"; + private String sortDirection = "ASC"; + + private QueryBuilderSearchFilter searchFilter; + @Before public void setUp() throws Exception { @@ -106,33 +110,35 @@ public void setUp() throws Exception { when(configurationService.getIntProperty(eq("rest.search.max.results"), anyInt())).thenReturn(100); when(searchService.toSortFieldIndex(any(String.class), any(String.class))) - .then(invocation -> invocation.getArguments()[0] + "_sort"); + .then(invocation -> invocation.getArguments()[0] + "_sort"); when(searchService - .getFacetYearRange(eq(context), nullable(IndexableObject.class), any(DiscoverySearchFilterFacet.class), - any(), any(DiscoverQuery.class))) - .then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) 
invocation.getArguments()[2])); + .getFacetYearRange(eq(context), nullable(IndexableObject.class), + any(DiscoverySearchFilterFacet.class), + any(), any(DiscoverQuery.class))) + .then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) invocation.getArguments()[2])); when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class), - any(DiscoveryConfiguration.class))) - .then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1], - invocation.getArguments()[1] + ":\"" + invocation.getArguments()[3] + "\"", - (String) invocation.getArguments()[3])); + any(DiscoveryConfiguration.class))) + .then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1], + invocation.getArguments()[1] + ":\"" + invocation + .getArguments()[3] + "\"", + (String) invocation.getArguments()[3])); discoveryConfiguration = new DiscoveryConfiguration(); discoveryConfiguration.setDefaultFilterQueries(Arrays.asList("archived:true")); DiscoveryHitHighlightingConfiguration discoveryHitHighlightingConfiguration = - new DiscoveryHitHighlightingConfiguration(); + new DiscoveryHitHighlightingConfiguration(); List discoveryHitHighlightFieldConfigurations = new LinkedList<>(); DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration = - new DiscoveryHitHighlightFieldConfiguration(); + new DiscoveryHitHighlightFieldConfiguration(); discoveryHitHighlightFieldConfiguration.setField("dc.title"); DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration1 = - new DiscoveryHitHighlightFieldConfiguration(); + new DiscoveryHitHighlightFieldConfiguration(); discoveryHitHighlightFieldConfiguration1.setField("fulltext"); discoveryHitHighlightFieldConfigurations.add(discoveryHitHighlightFieldConfiguration1); @@ -177,9 +183,8 @@ public void setUp() throws Exception { discoveryConfiguration.setSidebarFacets(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet)); 
discoveryConfiguration.setSearchFilters(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet)); + searchFilter = new QueryBuilderSearchFilter("subject", "equals", "Java"); query = "my test case"; - searchFilter = new SearchFilter("subject", "equals", "Java"); - page = PageRequest.of(1, 10, Sort.Direction.ASC, "dc.title"); queryBuilder.afterPropertiesSet(); } @@ -188,7 +193,8 @@ public void setUp() throws Exception { public void testBuildQuery() throws Exception { DiscoverQuery discoverQuery = queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "item", page); + .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter), + "item", pageSize, offset, sortProperty, sortDirection); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\"")); assertThat(discoverQuery.getQuery(), is(query)); @@ -214,10 +220,11 @@ public void testBuildQuery() throws Exception { @Test public void testBuildQueryDefaults() throws Exception { DiscoverQuery discoverQuery = - queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null); + queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null, null, + null, null); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true")); - assertThat(discoverQuery.getQuery(), is(emptyOrNullString())); + assertThat(discoverQuery.getQuery(), isEmptyOrNullString()); assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty())); //Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid // mock @@ -241,13 +248,12 @@ public void testBuildQueryDefaults() throws Exception { @Test public void testSortByScore() throws Exception { - page = PageRequest.of(2, 10, Sort.Direction.ASC, "SCORE"); - DiscoverQuery discoverQuery = - queryBuilder.buildQuery(context, null, discoveryConfiguration, null, 
null, emptyList(), page); + queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), 10, 20L, + "SCORE", "ASC"); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true")); - assertThat(discoverQuery.getQuery(), is(emptyOrNullString())); + assertThat(discoverQuery.getQuery(), isEmptyOrNullString()); assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty())); //Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid // mock @@ -269,48 +275,50 @@ public void testSortByScore() throws Exception { )); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidDSOType() throws Exception { queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "TEST", page); + .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter), + "TEST", pageSize, offset, sortProperty, sortDirection); } - @Test(expected = InvalidSearchRequestException.class) + @Test(expected = SearchServiceException.class) public void testInvalidSortField() throws Exception { - page = PageRequest.of(2, 10, Sort.Direction.ASC, "test"); queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); + .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter), + "ITEM", pageSize, 20L, "test", sortDirection); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidSearchFilter1() throws Exception { - searchFilter = new SearchFilter("test", "equals", "Smith, Donald"); + searchFilter = new QueryBuilderSearchFilter("test", "equals", "Smith, Donald"); queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); + .buildQuery(context, scope, 
discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", + pageSize, offset, sortProperty, sortDirection); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidSearchFilter2() throws Exception { when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class), - any(DiscoveryConfiguration.class))) - .thenThrow(SQLException.class); + any(DiscoveryConfiguration.class))) + .thenThrow(SQLException.class); queryBuilder - .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); + .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", + pageSize, offset, sortProperty, sortDirection); } @Test public void testBuildFacetQuery() throws Exception { - DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, - "prefix", query, - Arrays.asList(searchFilter), "item", page, - "subject"); + DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, "prefix", + query, Collections.singletonList(searchFilter), + "item", pageSize, offset, "subject"); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\"")); assertThat(discoverQuery.getQuery(), is(query)); assertThat(discoverQuery.getDSpaceObjectFilters(), contains(IndexableItem.TYPE)); - assertThat(discoverQuery.getSortField(), is(emptyOrNullString())); + assertThat(discoverQuery.getSortField(), isEmptyOrNullString()); assertThat(discoverQuery.getMaxResults(), is(0)); assertThat(discoverQuery.getStart(), is(0)); assertThat(discoverQuery.getFacetMinCount(), is(1)); @@ -321,10 +329,10 @@ public void testBuildFacetQuery() throws Exception { )); } - @Test(expected = DSpaceBadRequestException.class) + @Test(expected = IllegalArgumentException.class) public void testInvalidSearchFacet() throws Exception { 
queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, null, query, - Arrays.asList(searchFilter), "item", page, "test"); + Collections.singletonList(searchFilter), "item", pageSize, offset, "test"); } public Matcher discoverFacetFieldMatcher(DiscoverFacetField expected) { diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonCLIToolIT.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonCLIToolIT.java new file mode 100644 index 000000000000..df86b0883a0e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonCLIToolIT.java @@ -0,0 +1,153 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.eperson; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +import org.dspace.AbstractIntegrationTest; +import org.dspace.util.FakeConsoleServiceImpl; +import org.junit.Rule; +import org.junit.Test; +import org.junit.contrib.java.lang.system.ExpectedSystemExit; +import org.junit.contrib.java.lang.system.SystemErrRule; + +/** + * + * @author Mark H. Wood + */ +public class EPersonCLIToolIT + extends AbstractIntegrationTest { + private static final String NEW_PASSWORD = "secret"; + private static final String BAD_PASSWORD = "not secret"; + + // Handle System.exit() from unit under test. + @Rule + public final ExpectedSystemExit exit = ExpectedSystemExit.none(); + + // Capture System.err() output. + @Rule + public final SystemErrRule sysErr = new SystemErrRule().enableLog(); + + /** + * Test --modify --newPassword + * @throws Exception passed through. 
+ */ + @Test + @SuppressWarnings("static-access") + public void testSetPassword() + throws Exception { + exit.expectSystemExitWithStatus(0); + System.out.println("main"); + + // Create a source of "console" input. + FakeConsoleServiceImpl consoleService = new FakeConsoleServiceImpl(); + consoleService.setPassword(NEW_PASSWORD.toCharArray()); + + // Make certain that we know the eperson's email and old password hash. + String email = eperson.getEmail(); + String oldPasswordHash = eperson.getPassword(); + + // Instantiate the unit under test. + EPersonCLITool instance = new EPersonCLITool(); + instance.setConsoleService(consoleService); + + // Test! + String[] argv = { + "--modify", + "--email", email, + "--newPassword" + }; + instance.main(argv); + + String newPasswordHash = eperson.getPassword(); + assertNotEquals("Password hash did not change", oldPasswordHash, newPasswordHash); + } + + /** + * Test --modify --newPassword with an empty password + * @throws Exception passed through. + */ + @Test + @SuppressWarnings("static-access") + public void testSetEmptyPassword() + throws Exception { + exit.expectSystemExitWithStatus(0); + System.out.println("main"); + + // Create a source of "console" input. + FakeConsoleServiceImpl consoleService = new FakeConsoleServiceImpl(); + consoleService.setPassword(new char[0]); + + // Make certain that we know the eperson's email and old password hash. + String email = eperson.getEmail(); + String oldPasswordHash = eperson.getPassword(); + + // Instantiate the unit under test. + EPersonCLITool instance = new EPersonCLITool(); + instance.setConsoleService(consoleService); + + // Test! 
+ String[] argv = { + "--modify", + "--email", email, + "--newPassword" + }; + instance.main(argv); + + String newPasswordHash = eperson.getPassword(); + assertEquals("Password hash changed", oldPasswordHash, newPasswordHash); + + String stderr = sysErr.getLog(); + assertTrue("Standard error did not mention 'empty'", + stderr.contains(EPersonCLITool.ERR_PASSWORD_EMPTY)); + } + + /** + * Test --modify --newPassword with mismatched confirmation. + * This tests what happens when the user enters different strings at the + * first and second new-password prompts. + * @throws Exception passed through. + */ + @Test + @SuppressWarnings("static-access") + public void testSetMismatchedPassword() + throws Exception { + exit.expectSystemExitWithStatus(0); + System.out.println("main"); + + // Create a source of "console" input. + FakeConsoleServiceImpl consoleService = new FakeConsoleServiceImpl(); + consoleService.setPassword1(NEW_PASSWORD.toCharArray()); + consoleService.setPassword2(BAD_PASSWORD.toCharArray()); + + // Make certain that we know the eperson's email and old password hash. + String email = eperson.getEmail(); + String oldPasswordHash = eperson.getPassword(); + + // Instantiate the unit under test. + EPersonCLITool instance = new EPersonCLITool(); + instance.setConsoleService(consoleService); + + // Test! 
+ String[] argv = { + "--modify", + "--email", email, + "--newPassword" + }; + instance.main(argv); + + String newPasswordHash = eperson.getPassword(); + assertEquals("Password hash changed", oldPasswordHash, newPasswordHash); + + String stderr = sysErr.getLog(); + assertTrue("Standard error did not indicate password mismatch", + stderr.contains(EPersonCLITool.ERR_PASSWORD_NOMATCH)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java index 24bc00cce47a..3780afcf6393 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/EPersonTest.java @@ -8,17 +8,23 @@ package org.dspace.eperson; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; +import java.util.Set; import javax.mail.MessagingException; import org.apache.commons.codec.DecoderException; +import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; @@ -274,63 +280,184 @@ public void testFindByNetid() */ /** - * Test of search method, of class EPerson. 
+ * Test of search() and searchResultCount() methods of EPersonService + * NOTE: Pagination is not verified here because it is tested in EPersonRestRepositoryIT */ -/* @Test - public void testSearch_Context_String() - throws Exception - { - System.out.println("search"); - Context context = null; - String query = ""; - EPerson[] expResult = null; - EPerson[] result = EPerson.search(context, query); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. - fail("The test case is a prototype."); + public void testSearchAndCountByNameEmail() throws SQLException, AuthorizeException, IOException { + List allEPeopleAdded = new ArrayList<>(); + Group testGroup = createGroup("TestingGroup"); + try { + // Create 4 EPersons. Add a few to a test group to verify group membership doesn't matter + EPerson eperson1 = createEPersonAndAddToGroup("eperson1@example.com", "Jane", "Doe", testGroup); + EPerson eperson2 = createEPerson("eperson2@example.com", "John", "Doe"); + EPerson eperson3 = createEPersonAndAddToGroup("eperson3@example.com", "John", "Smith", testGroup); + EPerson eperson4 = createEPerson("eperson4@example.com", "Doe", "Smith"); + allEPeopleAdded.addAll(Arrays.asList(eperson1, eperson2, eperson3, eperson4)); + + List allJohns = Arrays.asList(eperson2, eperson3); + List searchJohnResults = ePersonService.search(context, "John", -1, -1); + assertTrue(searchJohnResults.containsAll(allJohns)); + assertEquals(searchJohnResults.size(), ePersonService.searchResultCount(context, "John")); + + List allDoes = Arrays.asList(eperson1, eperson2, eperson4); + List searchDoeResults = ePersonService.search(context, "Doe", -1, -1); + assertTrue(searchDoeResults.containsAll(allDoes)); + assertEquals(searchDoeResults.size(), ePersonService.searchResultCount(context, "Doe")); + + List allSmiths = Arrays.asList(eperson3, eperson4); + List searchSmithResults = ePersonService.search(context, "Smith", -1, -1); + 
assertTrue(searchSmithResults.containsAll(allSmiths)); + assertEquals(searchSmithResults.size(), ePersonService.searchResultCount(context, "Smith")); + + // Assert search on example.com returns everyone + List searchEmailResults = ePersonService.search(context, "example.com", -1, -1); + assertTrue(searchEmailResults.containsAll(allEPeopleAdded)); + assertEquals(searchEmailResults.size(), ePersonService.searchResultCount(context, "example.com")); + + // Assert exact email search returns just one + List exactEmailResults = ePersonService.search(context, "eperson1@example.com", -1, -1); + assertTrue(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchResultCount(context, "eperson1@example.com")); + + // Assert UUID search returns exact match + List uuidResults = ePersonService.search(context, eperson4.getID().toString(), -1, -1); + assertTrue(uuidResults.contains(eperson4)); + assertEquals(1, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchResultCount(context, eperson4.getID().toString())); + } finally { + // Remove all Groups & EPersons we added for this test + context.turnOffAuthorisationSystem(); + groupService.delete(context, testGroup); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } } -*/ /** - * Test of search method, of class EPerson. + * Test of searchNonMembers() and searchNonMembersCount() methods of EPersonService + * NOTE: Pagination is not verified here because it is tested in EPersonRestRepositoryIT */ -/* @Test - public void testSearch_4args() - throws Exception - { - System.out.println("search"); - Context context = null; - String query = ""; - int offset = 0; - int limit = 0; - EPerson[] expResult = null; - EPerson[] result = EPerson.search(context, query, offset, limit); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. 
- fail("The test case is a prototype."); - } -*/ + public void testSearchAndCountByNameEmailNonMembers() throws SQLException, AuthorizeException, IOException { + List allEPeopleAdded = new ArrayList<>(); + Group testGroup1 = createGroup("TestingGroup1"); + Group testGroup2 = createGroup("TestingGroup2"); + Group testGroup3 = createGroup("TestingGroup3"); + try { + // Create two EPersons in Group 1 + EPerson eperson1 = createEPersonAndAddToGroup("eperson1@example.com", "Jane", "Doe", testGroup1); + EPerson eperson2 = createEPersonAndAddToGroup("eperson2@example.com", "John", "Smith", testGroup1); + + // Create one more EPerson, and add it and a previous EPerson to Group 2 + EPerson eperson3 = createEPersonAndAddToGroup("eperson3@example.com", "John", "Doe", testGroup2); + context.turnOffAuthorisationSystem(); + groupService.addMember(context, testGroup2, eperson2); + groupService.update(context, testGroup2); + ePersonService.update(context, eperson2); + context.restoreAuthSystemState(); - /** - * Test of searchResultCount method, of class EPerson. - */ -/* - @Test - public void testSearchResultCount() - throws Exception - { - System.out.println("searchResultCount"); - Context context = null; - String query = ""; - int expResult = 0; - int result = EPerson.searchResultCount(context, query); - assertEquals(expResult, result); - // TODO review the generated test code and remove the default call to fail. 
- fail("The test case is a prototype."); + // Create 2 more EPersons with no group memberships + EPerson eperson4 = createEPerson("eperson4@example.com", "John", "Anthony"); + EPerson eperson5 = createEPerson("eperson5@example.org", "Smith", "Doe"); + allEPeopleAdded.addAll(Arrays.asList(eperson1, eperson2, eperson3, eperson4, eperson5)); + + // FIRST, test search by last name + // Verify all Does match a nonMember search of Group3 (which is an empty group) + List allDoes = Arrays.asList(eperson1, eperson3, eperson5); + List searchDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup3, -1, -1); + assertTrue(searchDoeResults.containsAll(allDoes)); + assertEquals(searchDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", testGroup3)); + + // Verify searching "Doe" with Group 2 *excludes* the one which is already a member + List allNonMemberDoes = Arrays.asList(eperson1, eperson5); + List searchNonMemberDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup2, + -1, -1); + assertTrue(searchNonMemberDoeResults.containsAll(allNonMemberDoes)); + assertFalse(searchNonMemberDoeResults.contains(eperson3)); + assertEquals(searchNonMemberDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", + testGroup2)); + + // Verify searching "Doe" with Group 1 *excludes* the one which is already a member + allNonMemberDoes = Arrays.asList(eperson3, eperson5); + searchNonMemberDoeResults = ePersonService.searchNonMembers(context, "Doe", testGroup1, -1, -1); + assertTrue(searchNonMemberDoeResults.containsAll(allNonMemberDoes)); + assertFalse(searchNonMemberDoeResults.contains(eperson1)); + assertEquals(searchNonMemberDoeResults.size(), ePersonService.searchNonMembersCount(context, "Doe", + testGroup1)); + + // SECOND, test search by first name + // Verify all Johns match a nonMember search of Group3 (which is an empty group) + List allJohns = Arrays.asList(eperson2, eperson3, eperson4); + List searchJohnResults = 
ePersonService.searchNonMembers(context, "John", + testGroup3, -1, -1); + assertTrue(searchJohnResults.containsAll(allJohns)); + assertEquals(searchJohnResults.size(), ePersonService.searchNonMembersCount(context, "John", + testGroup3)); + + // Verify searching "John" with Group 2 *excludes* the two who are already a member + List allNonMemberJohns = Arrays.asList(eperson4); + List searchNonMemberJohnResults = ePersonService.searchNonMembers(context, "John", + testGroup2, -1, -1); + assertTrue(searchNonMemberJohnResults.containsAll(allNonMemberJohns)); + assertFalse(searchNonMemberJohnResults.contains(eperson2)); + assertFalse(searchNonMemberJohnResults.contains(eperson3)); + assertEquals(searchNonMemberJohnResults.size(), ePersonService.searchNonMembersCount(context, "John", + testGroup2)); + + // FINALLY, test search by email + // Assert search on example.com excluding Group 1 returns just those not in that group + List exampleNonMembers = Arrays.asList(eperson3, eperson4); + List searchEmailResults = ePersonService.searchNonMembers(context, "example.com", + testGroup1, -1, -1); + assertTrue(searchEmailResults.containsAll(exampleNonMembers)); + assertFalse(searchEmailResults.contains(eperson1)); + assertFalse(searchEmailResults.contains(eperson2)); + assertEquals(searchEmailResults.size(), ePersonService.searchNonMembersCount(context, "example.com", + testGroup1)); + + // Assert exact email search returns just one (if not in group) + List exactEmailResults = ePersonService.searchNonMembers(context, "eperson1@example.com", + testGroup2, -1, -1); + assertTrue(exactEmailResults.contains(eperson1)); + assertEquals(exactEmailResults.size(), ePersonService.searchNonMembersCount(context, "eperson1@example.com", + testGroup2)); + // But, change the group to one they are a member of, and they won't be included + exactEmailResults = ePersonService.searchNonMembers(context, "eperson1@example.com", + testGroup1, -1, -1); + assertFalse(exactEmailResults.contains(eperson1)); + 
assertEquals(exactEmailResults.size(), ePersonService.searchNonMembersCount(context, "eperson1@example.com", + testGroup1)); + + // Assert UUID search returns exact match (if not in group) + List uuidResults = ePersonService.searchNonMembers(context, eperson3.getID().toString(), + testGroup1, -1, -1); + assertTrue(uuidResults.contains(eperson3)); + assertEquals(1, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchNonMembersCount(context, eperson3.getID().toString(), + testGroup1)); + // But, change the group to one they are a member of, and you'll get no results + uuidResults = ePersonService.searchNonMembers(context, eperson3.getID().toString(), + testGroup2, -1, -1); + assertFalse(uuidResults.contains(eperson3)); + assertEquals(0, uuidResults.size()); + assertEquals(uuidResults.size(), ePersonService.searchNonMembersCount(context, eperson3.getID().toString(), + testGroup2)); + + } finally { + // Remove all Groups & EPersons we added for this test + context.turnOffAuthorisationSystem(); + groupService.delete(context, testGroup1); + groupService.delete(context, testGroup2); + groupService.delete(context, testGroup3); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + context.restoreAuthSystemState(); + } } -*/ /** * Test of findAll method, of class EPerson. @@ -994,10 +1121,7 @@ public void testCascadingDeleteSubmitterPreservesWorkflowItems() wfGroup.addMember(groupMember); groupService.update(context, wfGroup); - // DSpace currently contains two workflow systems. The newer XMLWorfklow needs additional tables that are not - // part of the test database yet. While it is expected that it becomes the default workflow system (DS-2059) - // one day, this won't happen before it its backported to JSPUI (DS-2121). 
- // TODO: add tests using the configurable workflowsystem + // Start workflow int wfiID = workflowService.startWithoutNotify(context, wsi).getID(); context.restoreAuthSystemState(); context.commit(); @@ -1032,6 +1156,57 @@ public void testCascadingDeleteSubmitterPreservesWorkflowItems() wfi.getSubmitter()); } + @Test + public void findAndCountByGroups() throws SQLException, AuthorizeException, IOException { + // Create a group with 3 EPerson members + Group group = createGroup("parentGroup"); + EPerson eperson1 = createEPersonAndAddToGroup("test1@example.com", group); + EPerson eperson2 = createEPersonAndAddToGroup("test2@example.com", group); + EPerson eperson3 = createEPersonAndAddToGroup("test3@example.com", group); + groupService.update(context, group); + + Group group2 = null; + EPerson eperson4 = null; + + try { + // Assert that findByGroup is the same list of EPersons as getMembers() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be + // compared directly to a List. 
See https://stackoverflow.com/a/57399383/3750035 + assertTrue( + CollectionUtils.isEqualCollection(group.getMembers(), + ePersonService.findByGroups(context, Set.of(group), -1, -1))); + // Assert countByGroups is the same as the size of members + assertEquals(group.getMembers().size(), ePersonService.countByGroups(context, Set.of(group))); + + // Add another group with duplicate EPerson + group2 = createGroup("anotherGroup"); + groupService.addMember(context, group2, eperson1); + groupService.update(context, group2); + + // Verify countByGroups is still 3 (existing person should not be counted twice) + assertEquals(3, ePersonService.countByGroups(context, Set.of(group, group2))); + + // Add a new EPerson to new group, verify count goes up by one + eperson4 = createEPersonAndAddToGroup("test4@example.com", group2); + assertEquals(4, ePersonService.countByGroups(context, Set.of(group, group2))); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, group); + if (group2 != null) { + groupService.delete(context, group2); + } + ePersonService.delete(context, eperson1); + ePersonService.delete(context, eperson2); + ePersonService.delete(context, eperson3); + if (eperson4 != null) { + ePersonService.delete(context, eperson4); + } + context.restoreAuthSystemState(); + } + } + /** * Creates an item, sets the specified submitter. 
* @@ -1078,4 +1253,54 @@ private WorkspaceItem prepareWorkspaceItem(EPerson submitter) context.restoreAuthSystemState(); return wsi; } + + protected Group createGroup(String name) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + Group group = groupService.create(context); + group.setName(name); + groupService.update(context, group); + context.restoreAuthSystemState(); + return group; + } + + protected EPerson createEPersonAndAddToGroup(String email, Group group) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = createEPerson(email); + groupService.addMember(context, group, ePerson); + groupService.update(context, group); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + + protected EPerson createEPersonAndAddToGroup(String email, String firstname, String lastname, Group group) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = createEPerson(email, firstname, lastname); + groupService.addMember(context, group, ePerson); + groupService.update(context, group); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + + protected EPerson createEPerson(String email) throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = ePersonService.create(context); + ePerson.setEmail(email); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return ePerson; + } + protected EPerson createEPerson(String email, String firstname, String lastname) + throws SQLException, AuthorizeException { + context.turnOffAuthorisationSystem(); + EPerson ePerson = ePersonService.create(context); + ePerson.setEmail(email); + ePerson.setFirstName(context, firstname); + ePerson.setLastName(context, lastname); + ePersonService.update(context, ePerson); + context.restoreAuthSystemState(); + return 
ePerson; + } } diff --git a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java index ee9c883f1be6..fddcabe4b038 100644 --- a/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java +++ b/dspace-api/src/test/java/org/dspace/eperson/GroupTest.java @@ -10,6 +10,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -21,6 +22,7 @@ import java.util.Collections; import java.util.List; +import org.apache.commons.collections4.CollectionUtils; import org.apache.logging.log4j.Logger; import org.dspace.AbstractUnitTest; import org.dspace.authorize.AuthorizeException; @@ -604,6 +606,30 @@ public void allMembers() throws SQLException, AuthorizeException, EPersonDeletio } } + @Test + public void countAllMembers() throws SQLException, AuthorizeException, EPersonDeletionException, IOException { + List allEPeopleAdded = new ArrayList<>(); + try { + context.turnOffAuthorisationSystem(); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups1@dspace.org", topGroup)); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups2@dspace.org", level1Group)); + allEPeopleAdded.add(createEPersonAndAddToGroup("allMemberGroups3@dspace.org", level2Group)); + context.restoreAuthSystemState(); + + assertEquals(3, groupService.countAllMembers(context, topGroup)); + assertEquals(2, groupService.countAllMembers(context, level1Group)); + assertEquals(1, groupService.countAllMembers(context, level2Group)); + } finally { + // Remove all the people added (in order to not impact other tests) + context.turnOffAuthorisationSystem(); + for (EPerson ePerson : allEPeopleAdded) { + ePersonService.delete(context, ePerson); + } + 
context.restoreAuthSystemState(); + } + } + + @Test public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionException, IOException { assertTrue(groupService.isEmpty(topGroup)); @@ -620,6 +646,143 @@ public void isEmpty() throws SQLException, AuthorizeException, EPersonDeletionEx assertTrue(groupService.isEmpty(level2Group)); } + @Test + public void findAndCountByParent() throws SQLException, AuthorizeException, IOException { + + // Create a parent group with 3 child groups + Group parentGroup = createGroup("parentGroup"); + Group childGroup = createGroup("childGroup"); + Group child2Group = createGroup("child2Group"); + Group child3Group = createGroup("child3Group"); + groupService.addMember(context, parentGroup, childGroup); + groupService.addMember(context, parentGroup, child2Group); + groupService.addMember(context, parentGroup, child3Group); + groupService.update(context, parentGroup); + + try { + // Assert that findByParent is the same list of groups as getMemberGroups() when pagination is ignored + // (NOTE: Pagination is tested in GroupRestRepositoryIT) + // NOTE: isEqualCollection() must be used for comparison because Hibernate's "PersistentBag" cannot be + // compared directly to a List. 
See https://stackoverflow.com/a/57399383/3750035 + assertTrue( + CollectionUtils.isEqualCollection(parentGroup.getMemberGroups(), + groupService.findByParent(context, parentGroup, -1, -1))); + // Assert countBy parent is the same as the size of group members + assertEquals(parentGroup.getMemberGroups().size(), groupService.countByParent(context, parentGroup)); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, parentGroup); + groupService.delete(context, childGroup); + groupService.delete(context, child2Group); + groupService.delete(context, child3Group); + context.restoreAuthSystemState(); + } + } + + @Test + // Tests searchNonMembers() and searchNonMembersCount() + // NOTE: This does not test pagination as that is tested in GroupRestRepositoryIT in server-webapp + public void searchAndCountNonMembers() throws SQLException, AuthorizeException, IOException { + // Create a parent group with 2 child groups + Group parentGroup = createGroup("Some Parent Group"); + Group someStaffGroup = createGroup("Some Other Staff"); + Group someStudentsGroup = createGroup("Some Students"); + groupService.addMember(context, parentGroup, someStaffGroup); + groupService.addMember(context, parentGroup, someStudentsGroup); + groupService.update(context, parentGroup); + + // Create a separate parent which is not a member of the first & add two child groups to it + Group studentsNotInParentGroup = createGroup("Students not in Parent"); + Group otherStudentsNotInParentGroup = createGroup("Other Students"); + Group someOtherStudentsNotInParentGroup = createGroup("Some Other Students"); + groupService.addMember(context, studentsNotInParentGroup, otherStudentsNotInParentGroup); + groupService.addMember(context, studentsNotInParentGroup, someOtherStudentsNotInParentGroup); + groupService.update(context, studentsNotInParentGroup); + + try { + // Assert that all Groups *not* in parent group match an empty search + List notInParent = 
Arrays.asList(studentsNotInParentGroup, otherStudentsNotInParentGroup, + someOtherStudentsNotInParentGroup); + List nonMembersSearch = groupService.searchNonMembers(context, "", parentGroup, -1, -1); + // NOTE: Because others unit tests create groups, this search will return an undetermined number of results. + // Therefore, we just verify that our expected groups are included and others are NOT included. + assertTrue(nonMembersSearch.containsAll(notInParent)); + // Verify it does NOT contain members of parentGroup + assertFalse(nonMembersSearch.contains(someStaffGroup)); + assertFalse(nonMembersSearch.contains(someStudentsGroup)); + // Verify it also does NOT contain the parentGroup itself + assertFalse(nonMembersSearch.contains(parentGroup)); + // Verify the count for empty search matches the size of the search results + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "", parentGroup)); + + // Assert a search on "Students" matches all those same groups (as they all include that word in their name) + nonMembersSearch = groupService.searchNonMembers(context, "Students", parentGroup, -1, -1); + assertTrue(nonMembersSearch.containsAll(notInParent)); + //Verify an existing member group with "Students" in its name does NOT get returned + assertFalse(nonMembersSearch.contains(someStudentsGroup)); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, "Students", parentGroup)); + + + // Assert a search on "other" matches just two groups + // (this also tests search is case insensitive) + nonMembersSearch = groupService.searchNonMembers(context, "other", parentGroup, -1, -1); + assertTrue(nonMembersSearch.containsAll( + Arrays.asList(otherStudentsNotInParentGroup, someOtherStudentsNotInParentGroup))); + // Verify an existing member group with "Other" in its name does NOT get returned + assertFalse(nonMembersSearch.contains(someStaffGroup)); + assertEquals(nonMembersSearch.size(), 
groupService.searchNonMembersCount(context, "other", parentGroup)); + + // Assert a search on "Parent" matches just one group + nonMembersSearch = groupService.searchNonMembers(context, "Parent", parentGroup, -1, -1); + assertTrue(nonMembersSearch.contains(studentsNotInParentGroup)); + // Verify Parent Group itself does NOT get returned + assertFalse(nonMembersSearch.contains(parentGroup)); + assertEquals(nonMembersSearch.size(), groupService.searchNonMembersCount(context, "Parent", parentGroup)); + + // Assert a UUID search matching a non-member group will return just that one group + nonMembersSearch = groupService.searchNonMembers(context, + someOtherStudentsNotInParentGroup.getID().toString(), + parentGroup, -1, -1); + assertEquals(1, nonMembersSearch.size()); + assertTrue(nonMembersSearch.contains(someOtherStudentsNotInParentGroup)); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, + someOtherStudentsNotInParentGroup.getID().toString(), + parentGroup)); + + // Assert a UUID search matching an EXISTING member will return NOTHING + // (as this group is excluded from the search) + nonMembersSearch = groupService.searchNonMembers(context, someStudentsGroup.getID().toString(), + parentGroup,-1, -1); + assertEquals(0, nonMembersSearch.size()); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, someStudentsGroup.getID().toString(), + parentGroup)); + + // Assert a UUID search matching Parent Group *itself* will return NOTHING + // (as this group is excluded from the search) + nonMembersSearch = groupService.searchNonMembers(context, parentGroup.getID().toString(), + parentGroup,-1, -1); + assertEquals(0, nonMembersSearch.size()); + assertEquals(nonMembersSearch.size(), + groupService.searchNonMembersCount(context, parentGroup.getID().toString(), + parentGroup)); + } finally { + // Clean up our data + context.turnOffAuthorisationSystem(); + groupService.delete(context, parentGroup); + 
groupService.delete(context, someStaffGroup); + groupService.delete(context, someStudentsGroup); + groupService.delete(context, studentsNotInParentGroup); + groupService.delete(context, otherStudentsNotInParentGroup); + groupService.delete(context, someOtherStudentsNotInParentGroup); + context.restoreAuthSystemState(); + } + + } + protected Group createGroup(String name) throws SQLException, AuthorizeException { context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/eperson/SubscribeServiceIT.java b/dspace-api/src/test/java/org/dspace/eperson/SubscribeServiceIT.java new file mode 100644 index 000000000000..945dd481d00a --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/eperson/SubscribeServiceIT.java @@ -0,0 +1,417 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.eperson; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static org.dspace.builder.SubscribeBuilder.subscribeBuilder; +import static org.dspace.matcher.SubscribeMatcher.matches; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertEquals; + +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.commons.lang.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.SubscribeBuilder; +import 
org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.eperson.service.SubscribeService; +import org.junit.Before; +import org.junit.Test; + +public class SubscribeServiceIT extends AbstractIntegrationTestWithDatabase { + + private final SubscribeService subscribeService = ContentServiceFactory.getInstance().getSubscribeService(); + + private Collection firstCollection; + private Collection secondCollection; + + @Before + public void init() throws Exception { + context.turnOffAuthorisationSystem(); + Community parentCommunity = CommunityBuilder.createCommunity(context).build(); + firstCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("First Collection").build(); + secondCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Second Collection").build(); + context.restoreAuthSystemState(); + } + + @Test + public void findAllWithoutAndWithLimit() throws Exception { + + String resourceType = "Collection"; + + EPerson subscribingUser = context.getCurrentUser(); + + createSubscription("content", firstCollection, subscribingUser, weekly()); + createSubscription("content", secondCollection, subscribingUser, daily(), annual()); + + // unlimited search returns all subscriptions + + List subscriptions = subscribeService.findAll(context, resourceType, 10, 0); + assertThat(subscriptions, containsInAnyOrder( + asList(matches(firstCollection, subscribingUser, "content", + singletonList(weekly())), + matches(secondCollection, subscribingUser, "content", + asList(daily(), annual()))))); + + // limited search returns first + + subscriptions = subscribeService.findAll(context, resourceType, 1, 0); + + assertThat(subscriptions, containsInAnyOrder( + singletonList(matches(firstCollection, subscribingUser, "content", + singletonList(weekly()))))); + + // search with offset returns second + 
+ subscriptions = subscribeService.findAll(context, resourceType, 100, 1); + + assertThat(subscriptions, containsInAnyOrder( + singletonList(matches(secondCollection, subscribingUser, "content", + asList(daily(), annual()))))); + + // lookup without resource type + subscriptions = subscribeService.findAll(context, StringUtils.EMPTY, 100, 0); + + assertThat(subscriptions, containsInAnyOrder( + asList(matches(firstCollection, subscribingUser, "content", + singletonList(weekly())), + matches(secondCollection, subscribingUser, "content", + asList(daily(), annual()))))); + + } + + private static SubscriptionParameter annual() { + return createSubscriptionParameter("frequency", "A"); + } + + private static SubscriptionParameter daily() { + return createSubscriptionParameter("frequency", "D"); + } + + @Test(expected = Exception.class) + public void findAllWithInvalidResource() throws Exception { + + String resourceType = "INVALID"; + Integer limit = 10; + Integer offset = 0; + + createSubscription("content", firstCollection, context.getCurrentUser(), + weekly()); + + subscribeService.findAll(context, resourceType, limit, offset); + + } + + @Test + public void newSubscriptionCreatedByAdmin() throws Exception { + + SubscriptionParameter monthly = createSubscriptionParameter("frequency", "M"); + + List parameters = Collections.singletonList( + monthly); + + EPerson currentUser = context.getCurrentUser(); + context.setCurrentUser(admin); + Subscription subscription = subscribeService.subscribe(context, eperson, + firstCollection, parameters, "content"); + + assertThat(subscription, is(matches(firstCollection, eperson, + "content", singletonList(monthly)))); + + SubscribeBuilder.deleteSubscription(subscription.getID()); + context.setCurrentUser(currentUser); + + } + + @Test + public void newSubscriptionCreatedByCurrentUser() throws Exception { + + EPerson currentUser = context.getCurrentUser(); + Subscription subscription = subscribeService.subscribe(context, currentUser, + 
secondCollection, + asList(daily(), weekly()), "content"); + + assertThat(subscription, matches(secondCollection, currentUser, "content", + asList(daily(), weekly()))); + + SubscribeBuilder.deleteSubscription(subscription.getID()); + } + + @Test(expected = AuthorizeException.class) + public void nonAdminDifferentUserTriesToSubscribe() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson notAdmin = EPersonBuilder.createEPerson(context).withEmail("not-admin@example.com").build(); + context.restoreAuthSystemState(); + EPerson currentUser = context.getCurrentUser(); + context.setCurrentUser(notAdmin); + try { + subscribeService.subscribe(context, admin, firstCollection, + singletonList( + daily()), "content"); + } finally { + context.setCurrentUser(currentUser); + } + + } + + @Test + public void unsubscribeByAdmin() throws Exception { + + EPerson subscribingUser = context.getCurrentUser(); + createSubscription("content", secondCollection, subscribingUser, + weekly()); + + List subscriptions = + subscribeService.findSubscriptionsByEPersonAndDso(context, subscribingUser, + secondCollection, 100, 0); + + assertEquals(subscriptions.size(), 1); + + context.setCurrentUser(admin); + subscribeService.unsubscribe(context, subscribingUser, secondCollection); + context.setCurrentUser(subscribingUser); + + subscriptions = + subscribeService.findSubscriptionsByEPersonAndDso(context, subscribingUser, + secondCollection, 100, 0); + + assertEquals(subscriptions.size(), 0); + } + + @Test + public void subscribingUserUnsubscribesTheirSubscription() throws Exception { + + EPerson subscribingUser = context.getCurrentUser(); + createSubscription("content", secondCollection, subscribingUser, + weekly()); + + List subscriptions = + subscribeService.findSubscriptionsByEPersonAndDso(context, subscribingUser, + secondCollection, 100, 0); + + assertEquals(subscriptions.size(), 1); + + + subscribeService.unsubscribe(context, subscribingUser, secondCollection); + + subscriptions = 
+ subscribeService.findSubscriptionsByEPersonAndDso(context, subscribingUser, + secondCollection, 100, 0); + + assertEquals(subscriptions.size(), 0); + } + + @Test(expected = AuthorizeException.class) + public void nonAdminDifferentUserTriesToUnSubscribeAnotherUser() throws Exception { + EPerson subscribingUser = context.getCurrentUser(); + Subscription subscription = createSubscription("content", secondCollection, subscribingUser, + weekly()); + + context.turnOffAuthorisationSystem(); + EPerson nonAdmin = EPersonBuilder.createEPerson(context).build(); + context.restoreAuthSystemState(); + + + try { + context.setCurrentUser(nonAdmin); + subscribeService.unsubscribe(context, subscribingUser, secondCollection); + } finally { + context.setCurrentUser(subscribingUser); + SubscribeBuilder.deleteSubscription(subscription.getID()); + } + + } + + @Test + public void updateSubscription() throws Exception { + + EPerson currentUser = context.getCurrentUser(); + Subscription subscription = createSubscription("original", + firstCollection, currentUser, + createSubscriptionParameter("frequency", "M")); + + String updatedType = "updated"; + List updatedParameters = Collections.singletonList( + annual() + ); + + try { + Subscription updated = subscribeService.updateSubscription(context, subscription.getID(), + updatedType, updatedParameters); + + assertThat(updated, is(matches(firstCollection, currentUser, updatedType, updatedParameters))); + + List subscriptions = + subscribeService.findSubscriptionsByEPersonAndDso(context, currentUser, firstCollection, 10, 0); + + assertThat(subscriptions, contains( + matches(firstCollection, currentUser, updatedType, updatedParameters))); + + } finally { + SubscribeBuilder.deleteSubscription(subscription.getID()); + } + + } + + @Test + public void parametersAdditionAndRemoval() throws Exception { + + SubscriptionParameter firstParameter = createSubscriptionParameter("key1", "value1"); + SubscriptionParameter secondParameter = 
createSubscriptionParameter("key2", "value2"); + + EPerson currentUser = context.getCurrentUser(); + Subscription subscription = createSubscription("type", secondCollection, currentUser, + firstParameter, secondParameter); + int subscriptionId = subscription.getID(); + + SubscriptionParameter addedParameter = createSubscriptionParameter("added", "add"); + + + try { + Subscription updatedSubscription = subscribeService.addSubscriptionParameter(context, subscriptionId, + addedParameter); + assertThat(updatedSubscription, is(matches(secondCollection, currentUser, "type", + asList(firstParameter, secondParameter, addedParameter)))); + updatedSubscription = subscribeService.removeSubscriptionParameter(context, subscriptionId, + secondParameter); + assertThat(updatedSubscription, is(matches(secondCollection, currentUser, "type", + asList(firstParameter, addedParameter)))); + } finally { + SubscribeBuilder.deleteSubscription(subscriptionId); + } + } + + @Test + public void findersAndDeletionsTest() throws SQLException { + // method to test all find and delete methods exposed by SubscribeService + context.turnOffAuthorisationSystem(); + EPerson firstSubscriber = EPersonBuilder.createEPerson(context).withEmail("first-user@example.com").build(); + EPerson secondSubscriber = EPersonBuilder.createEPerson(context).withEmail("second-user@example.com").build(); + EPerson thirdSubscriber = EPersonBuilder.createEPerson(context).withEmail("third-user@example.com").build(); + context.restoreAuthSystemState(); + + EPerson currentUser = context.getCurrentUser(); + try { + context.setCurrentUser(firstSubscriber); + createSubscription("type1", firstCollection, firstSubscriber, daily(), + weekly()); + createSubscription("type1", secondCollection, firstSubscriber, + daily(), + annual()); + createSubscription("type2", secondCollection, firstSubscriber, + daily()); + + context.setCurrentUser(secondSubscriber); + createSubscription("type1", firstCollection, secondSubscriber, + daily()); + 
createSubscription("type1", secondCollection, secondSubscriber, + daily(), + annual()); + + context.setCurrentUser(thirdSubscriber); + createSubscription("type1", firstCollection, thirdSubscriber, daily()); + createSubscription("type1", secondCollection, thirdSubscriber, + daily(), + annual()); + + } finally { + context.setCurrentUser(currentUser); + } + + List firstUserSubscriptions = + subscribeService.findSubscriptionsByEPerson(context, firstSubscriber, 100, 0); + + assertThat(firstUserSubscriptions, containsInAnyOrder( + matches(firstCollection, firstSubscriber, "type1", asList(daily(), + weekly())), + matches(secondCollection, firstSubscriber, "type1", asList(daily(), + annual())), + matches(secondCollection, firstSubscriber, "type2", singletonList( + daily())) + )); + + List firstUserSubscriptionsLimited = + subscribeService.findSubscriptionsByEPerson(context, firstSubscriber, 1, 0); + + assertThat(firstUserSubscriptionsLimited.size(), is(1)); + + List firstUserSubscriptionsWithOffset = + subscribeService.findSubscriptionsByEPerson(context, firstSubscriber, 100, 1); + + assertThat(firstUserSubscriptionsWithOffset.size(), is(2)); + + subscribeService.deleteByEPerson(context, firstSubscriber); + assertThat(subscribeService.findSubscriptionsByEPerson(context, firstSubscriber, 100, 0), + is(List.of())); + + List secondSubscriberSecondCollectionSubscriptions = + subscribeService.findSubscriptionsByEPersonAndDso(context, secondSubscriber, firstCollection, 10, 0); + + assertThat(secondSubscriberSecondCollectionSubscriptions, contains( + matches(firstCollection, secondSubscriber, "type1", singletonList(daily())) + )); + + List byTypeAndFrequency = + subscribeService.findAllSubscriptionsBySubscriptionTypeAndFrequency(context, "type1", + "D"); + assertThat(byTypeAndFrequency, containsInAnyOrder( + matches(firstCollection, secondSubscriber, "type1", singletonList( + daily())), + matches(secondCollection, secondSubscriber, "type1", asList(daily(), + annual())), + 
matches(firstCollection, thirdSubscriber, "type1", singletonList( + daily())), + matches(secondCollection, thirdSubscriber, "type1", asList(daily(), + annual())) + )); + + assertThat(subscribeService.countAll(context), is(4L)); + assertThat(subscribeService.countByEPersonAndDSO(context, secondSubscriber, secondCollection), is(1L)); + assertThat(subscribeService.countSubscriptionsByEPerson(context, thirdSubscriber), is(2L)); + + + } + + private static SubscriptionParameter weekly() { + return createSubscriptionParameter("frequency", "W"); + } + + private Subscription createSubscription(String type, DSpaceObject dso, EPerson ePerson, + SubscriptionParameter... parameters) { + return subscribeBuilder(context, type, + dso, ePerson, + Arrays.stream(parameters).collect(Collectors.toList())).build(); + } + + + private static SubscriptionParameter createSubscriptionParameter(String name, String value) { + SubscriptionParameter parameter = new SubscriptionParameter(); + parameter.setName(name); + parameter.setValue(value); + return parameter; + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/external/MockOpenaireRestConnector.java b/dspace-api/src/test/java/org/dspace/external/MockOpenaireRestConnector.java new file mode 100644 index 000000000000..c67402dfdcc6 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/external/MockOpenaireRestConnector.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external; + +import java.io.InputStream; +import javax.xml.bind.JAXBException; + +import eu.openaire.jaxb.helper.OpenAIREHandler; +import eu.openaire.jaxb.model.Response; + +/** + * Mock the Openaire rest connector for unit testing
    + * will be resolved against static test xml files + * + * @author pgraca + * + */ +public class MockOpenaireRestConnector extends OpenaireRestConnector { + + public MockOpenaireRestConnector(String url) { + super(url); + } + + @Override + public Response searchProjectByKeywords(int page, int size, String... keywords) { + try { + return OpenAIREHandler.unmarshal(this.getClass().getResourceAsStream("openaire-projects.xml")); + } catch (JAXBException e) { + e.printStackTrace(); + } + return null; + } + + @Override + public Response searchProjectByIDAndFunder(String projectID, String projectFunder, int page, int size) { + try { + return OpenAIREHandler.unmarshal(this.getClass().getResourceAsStream("openaire-project.xml")); + } catch (JAXBException e) { + e.printStackTrace(); + } + return null; + } + + @Override + public Response search(String path, int page, int size) { + try { + return OpenAIREHandler.unmarshal(this.getClass().getResourceAsStream("openaire-no-projects.xml")); + } catch (JAXBException e) { + e.printStackTrace(); + } + return null; + } + + @Override + public InputStream get(String file, String accessToken) { + return this.getClass().getResourceAsStream("openaire-no-projects.xml"); + } +} diff --git a/dspace-api/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java b/dspace-api/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java index e5a86f1f56ba..cdfbf1e9435a 100644 --- a/dspace-api/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java +++ b/dspace-api/src/test/java/org/dspace/external/provider/impl/MockDataProvider.java @@ -17,9 +17,9 @@ import org.apache.commons.lang3.StringUtils; import org.dspace.content.dto.MetadataValueDTO; import org.dspace.external.model.ExternalDataObject; -import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.provider.AbstractExternalDataProvider; -public class MockDataProvider implements ExternalDataProvider { +public class 
MockDataProvider extends AbstractExternalDataProvider { private Map mockLookupMap; private String sourceIdentifier; diff --git a/dspace-api/src/test/java/org/dspace/external/provider/impl/MockPubmedImportMetadataSourceServiceImpl.java b/dspace-api/src/test/java/org/dspace/external/provider/impl/MockPubmedImportMetadataSourceServiceImpl.java deleted file mode 100644 index a143ed7d34bb..000000000000 --- a/dspace-api/src/test/java/org/dspace/external/provider/impl/MockPubmedImportMetadataSourceServiceImpl.java +++ /dev/null @@ -1,85 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.external.provider.impl; - -import static org.mockito.Mockito.when; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.io.UncheckedIOException; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; -import org.mockito.ArgumentCaptor; -import org.mockito.ArgumentMatchers; -import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; -import org.springframework.util.FileCopyUtils; - -/** - * we override the init method to mock the rest call to pubmed the following - * mock definitions will allow to answer to efetch or esearch requests using the - * test resource files (pubmed-esearch.fcgi.xml or pubmed-efetch.fcgi.xml) - * - * @author Andrea Bollini (andrea.bollini at 4science.it) - * - */ -public class MockPubmedImportMetadataSourceServiceImpl extends PubmedImportMetadataSourceServiceImpl { - - @Override - public void init() throws Exception { - pubmedWebTarget = 
Mockito.mock(WebTarget.class); - ArgumentCaptor valueCapture = ArgumentCaptor.forClass(String.class); - when(pubmedWebTarget.queryParam(ArgumentMatchers.any(), ArgumentMatchers.any())) - .thenAnswer(new Answer() { - @Override - public WebTarget answer(InvocationOnMock invocation) throws Throwable { - return pubmedWebTarget; - } - }); - when(pubmedWebTarget.path(valueCapture.capture())).thenAnswer(new Answer() { - @Override - public WebTarget answer(InvocationOnMock invocation) throws Throwable { - return pubmedWebTarget; - } - }); - when(pubmedWebTarget.request(ArgumentMatchers.any(MediaType.class))) - .thenAnswer(new Answer() { - public Invocation.Builder answer(InvocationOnMock invocation) throws Throwable { - Invocation.Builder builder = Mockito.mock(Invocation.Builder.class); - when(builder.get()).thenAnswer(new Answer() { - @Override - public Response answer(InvocationOnMock invocation) throws Throwable { - Response response = Mockito.mock(Response.class); - when(response.readEntity(ArgumentMatchers.eq(String.class))).then(new Answer() { - @Override - public String answer(InvocationOnMock invocation) throws Throwable { - String resourceName = "pubmed-" + valueCapture.getValue() + ".xml"; - InputStream resource = getClass().getResourceAsStream(resourceName); - try (Reader reader = new InputStreamReader(resource)) { - return FileCopyUtils.copyToString(reader); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - }); - return response; - } - }); - return builder; - }; - }); - } - -} diff --git a/dspace-api/src/test/java/org/dspace/external/provider/impl/OpenaireFundingDataProviderTest.java b/dspace-api/src/test/java/org/dspace/external/provider/impl/OpenaireFundingDataProviderTest.java new file mode 100644 index 000000000000..d14dc990353d --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/external/provider/impl/OpenaireFundingDataProviderTest.java @@ -0,0 +1,105 @@ +/** + * The contents of this file are subject to the license and 
copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.List; +import java.util.Optional; + +import org.dspace.AbstractDSpaceTest; +import org.dspace.external.factory.ExternalServiceFactory; +import org.dspace.external.model.ExternalDataObject; +import org.dspace.external.provider.ExternalDataProvider; +import org.dspace.external.service.ExternalDataService; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for OpenaireFundingDataProvider + * + * @author pgraca + * + */ +public class OpenaireFundingDataProviderTest extends AbstractDSpaceTest { + + ExternalDataService externalDataService; + ExternalDataProvider openaireFundingDataProvider; + + /** + * This method will be run before every test as per @Before. It will initialize + * resources required for each individual unit test. 
+ * + * Other methods can be annotated with @Before here or in subclasses but no + * execution order is guaranteed + */ + @Before + public void init() { + // Set up External Service Factory and set data providers + externalDataService = ExternalServiceFactory.getInstance().getExternalDataService(); + openaireFundingDataProvider = externalDataService.getExternalDataProvider("openaireFunding"); + } + + @Test + public void testNumberOfResultsWSingleKeyword() { + assertNotNull("openaireFundingDataProvider is not null", openaireFundingDataProvider); + assertEquals("openaireFunding.numberOfResults.query:mock", 77, + openaireFundingDataProvider.getNumberOfResults("mock")); + } + + @Test + public void testNumberOfResultsWKeywords() { + assertNotNull("openaireFundingDataProvider is not null", openaireFundingDataProvider); + assertEquals("openaireFunding.numberOfResults.query:mock+test", 77, + openaireFundingDataProvider.getNumberOfResults("mock+test")); + } + + @Test + public void testQueryResultsWSingleKeyword() { + assertNotNull("openaireFundingDataProvider is not null", openaireFundingDataProvider); + List results = openaireFundingDataProvider.searchExternalDataObjects("mock", 0, 10); + assertEquals("openaireFunding.searchExternalDataObjects.size", 10, results.size()); + } + + @Test + public void testQueryResultsWKeywords() { + String value = "Mushroom Robo-Pic - Development of an autonomous robotic mushroom picking system"; + + assertNotNull("openaireFundingDataProvider is not null", openaireFundingDataProvider); + List results = openaireFundingDataProvider.searchExternalDataObjects("mock+test", 0, 10); + assertEquals("openaireFunding.searchExternalDataObjects.size", 10, results.size()); + assertTrue("openaireFunding.searchExternalDataObjects.first.value", value.equals(results.get(0).getValue())); + } + + @Test + public void testGetDataObject() { + String id = "aW5mbzpldS1yZXBvL2dyYW50QWdyZWVtZW50L0ZDVC81ODc2LVBQQ0RUSS8xMTAwNjIvUFQ="; + String value = "Portuguese Wild 
Mushrooms: Chemical characterization and functional study" + + " of antiproliferative and proapoptotic properties in cancer cell lines"; + + assertNotNull("openaireFundingDataProvider is not null", openaireFundingDataProvider); + + Optional result = openaireFundingDataProvider.getExternalDataObject(id); + + assertTrue("openaireFunding.getExternalDataObject.exists", result.isPresent()); + assertTrue("openaireFunding.getExternalDataObject.value", value.equals(result.get().getValue())); + } + + @Test + public void testGetDataObjectWInvalidId() { + String id = "WRONGID"; + + assertNotNull("openaireFundingDataProvider is not null", openaireFundingDataProvider); + + Optional result = openaireFundingDataProvider.getExternalDataObject(id); + + assertTrue("openaireFunding.getExternalDataObject.notExists:WRONGID", result.isEmpty()); + } +} diff --git a/dspace-api/src/test/java/org/dspace/external/provider/impl/OrcidPublicationDataProviderIT.java b/dspace-api/src/test/java/org/dspace/external/provider/impl/OrcidPublicationDataProviderIT.java new file mode 100644 index 000000000000..dae14115b8e0 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/external/provider/impl/OrcidPublicationDataProviderIT.java @@ -0,0 +1,434 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.external.provider.impl; + +import static java.util.Optional.of; +import static org.dspace.app.matcher.LambdaMatcher.has; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static 
org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.net.URL; +import java.util.List; +import java.util.Optional; +import java.util.function.Predicate; +import javax.xml.bind.JAXBContext; +import javax.xml.bind.Unmarshaller; + +import org.apache.commons.codec.binary.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.OrcidTokenBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.dto.MetadataValueDTO; +import org.dspace.external.model.ExternalDataObject; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidConfiguration; +import org.dspace.orcid.model.OrcidTokenResponseDTO; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkBulk; +import org.orcid.jaxb.model.v3.release.record.summary.Works; + +/** + * Integration tests for {@link OrcidPublicationDataProvider}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidPublicationDataProviderIT extends AbstractIntegrationTestWithDatabase { + + private static final String BASE_XML_DIR_PATH = "org/dspace/app/orcid-works/"; + + private static final String ACCESS_TOKEN = "32c83ccb-c6d5-4981-b6ea-6a34a36de8ab"; + + private static final String ORCID = "0000-1111-2222-3333"; + + private OrcidPublicationDataProvider dataProvider; + + private OrcidConfiguration orcidConfiguration; + + private OrcidClient orcidClient; + + private OrcidClient orcidClientMock; + + private String originalClientId; + + private Collection persons; + + @Before + public void setup() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + persons = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Person") + .withName("Profiles") + .build(); + + context.restoreAuthSystemState(); + + dataProvider = new DSpace().getServiceManager() + .getServiceByName("orcidPublicationDataProvider", OrcidPublicationDataProvider.class); + + orcidConfiguration = new DSpace().getServiceManager() + .getServiceByName("org.dspace.orcid.client.OrcidConfiguration", OrcidConfiguration.class); + + orcidClientMock = mock(OrcidClient.class); + orcidClient = dataProvider.getOrcidClient(); + + dataProvider.setReadPublicAccessToken(null); + dataProvider.setOrcidClient(orcidClientMock); + + originalClientId = orcidConfiguration.getClientId(); + orcidConfiguration.setClientId("DSPACE-CLIENT-ID"); + orcidConfiguration.setClientSecret("DSPACE-CLIENT-SECRET"); + + when(orcidClientMock.getReadPublicAccessToken()).thenReturn(buildTokenResponse(ACCESS_TOKEN)); + + when(orcidClientMock.getWorks(any(), eq(ORCID))).thenReturn(unmarshall("works.xml", Works.class)); + when(orcidClientMock.getWorks(eq(ORCID))).thenReturn(unmarshall("works.xml", Works.class)); + + 
when(orcidClientMock.getObject(any(), eq(ORCID), any(), eq(Work.class))) + .then((invocation) -> of(unmarshall("work-" + invocation.getArgument(2) + ".xml", Work.class))); + when(orcidClientMock.getObject(eq(ORCID), any(), eq(Work.class))) + .then((invocation) -> of(unmarshall("work-" + invocation.getArgument(1) + ".xml", Work.class))); + + when(orcidClientMock.getWorkBulk(any(), eq(ORCID), any())) + .then((invocation) -> unmarshallWorkBulk(invocation.getArgument(2))); + when(orcidClientMock.getWorkBulk(eq(ORCID), any())) + .then((invocation) -> unmarshallWorkBulk(invocation.getArgument(1))); + + } + + @After + public void after() { + dataProvider.setOrcidClient(orcidClient); + orcidConfiguration.setClientId(originalClientId); + } + + @Test + public void testSearchWithoutPagination() throws Exception { + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + ExternalDataObject firstObject = externalObjects.get(0); + assertThat(firstObject.getDisplayValue(), is("The elements of style and the survey of ophthalmology.")); + assertThat(firstObject.getValue(), is("The elements of style and the survey of ophthalmology.")); + assertThat(firstObject.getId(), is(ORCID + "::277904")); + assertThat(firstObject.getSource(), is("orcidWorks")); + + List metadata = firstObject.getMetadata(); + assertThat(metadata, hasSize(7)); + assertThat(metadata, has(metadata("dc.date.issued", "2011"))); + assertThat(metadata, has(metadata("dc.source", "Test Journal"))); + assertThat(metadata, has(metadata("dc.language.iso", "it"))); + assertThat(metadata, has(metadata("dc.type", "Other"))); + assertThat(metadata, has(metadata("dc.identifier.doi", "10.11234.12"))); + assertThat(metadata, has(metadata("dc.contributor.author", "Walter White"))); + assertThat(metadata, has(metadata("dc.title", "The elements of style and the survey of ophthalmology."))); + + ExternalDataObject secondObject = externalObjects.get(1); + 
assertThat(secondObject.getDisplayValue(), is("Another cautionary tale.")); + assertThat(secondObject.getValue(), is("Another cautionary tale.")); + assertThat(secondObject.getId(), is(ORCID + "::277902")); + assertThat(secondObject.getSource(), is("orcidWorks")); + + metadata = secondObject.getMetadata(); + assertThat(metadata, hasSize(8)); + assertThat(metadata, has(metadata("dc.date.issued", "2011-05-01"))); + assertThat(metadata, has(metadata("dc.description.abstract", "Short description"))); + assertThat(metadata, has(metadata("dc.relation.ispartof", "Journal title"))); + assertThat(metadata, has(metadata("dc.contributor.author", "Walter White"))); + assertThat(metadata, has(metadata("dc.contributor.author", "John White"))); + assertThat(metadata, has(metadata("dc.contributor.editor", "Jesse Pinkman"))); + assertThat(metadata, has(metadata("dc.title", "Another cautionary tale."))); + assertThat(metadata, has(metadata("dc.type", "Article"))); + + ExternalDataObject thirdObject = externalObjects.get(2); + assertThat(thirdObject.getDisplayValue(), is("Branch artery occlusion in a young woman.")); + assertThat(thirdObject.getValue(), is("Branch artery occlusion in a young woman.")); + assertThat(thirdObject.getId(), is(ORCID + "::277871")); + assertThat(thirdObject.getSource(), is("orcidWorks")); + + metadata = thirdObject.getMetadata(); + assertThat(metadata, hasSize(3)); + assertThat(metadata, has(metadata("dc.date.issued", "1985-07-01"))); + assertThat(metadata, has(metadata("dc.title", "Branch artery occlusion in a young woman."))); + assertThat(metadata, has(metadata("dc.type", "Article"))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + + } + + @Test + public void testSearchWithInvalidOrcidId() { + + IllegalArgumentException exception = 
assertThrows(IllegalArgumentException.class, + () -> dataProvider.searchExternalDataObjects("0000-1111-2222", 0, -1)); + + assertThat(exception.getMessage(), is("The given ORCID ID is not valid: 0000-1111-2222")); + + } + + @Test + public void testSearchWithStoredAccessToken() throws Exception { + + context.turnOffAuthorisationSystem(); + + String accessToken = "95cb5ed9-c208-4bbc-bc99-aa0bd76e4452"; + + Item profile = ItemBuilder.createItem(context, persons) + .withTitle("Profile") + .withOrcidIdentifier(ORCID) + .withDspaceObjectOwner(eperson.getEmail(), eperson.getID().toString()) + .build(); + + OrcidTokenBuilder.create(context, eperson, accessToken) + .withProfileItem(profile) + .build(); + + context.restoreAuthSystemState(); + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock).getWorks(accessToken, ORCID); + verify(orcidClientMock).getWorkBulk(accessToken, ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testSearchWithProfileWithoutAccessToken() throws Exception { + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, persons) + .withTitle("Profile") + .withOrcidIdentifier(ORCID) + .build(); + + context.restoreAuthSystemState(); + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testSearchWithoutResults() throws Exception { + + String unknownOrcid = "1111-2222-3333-4444"; + when(orcidClientMock.getWorks(ACCESS_TOKEN, unknownOrcid)).thenReturn(new Works()); + + List externalObjects = 
dataProvider.searchExternalDataObjects(unknownOrcid, 0, -1); + assertThat(externalObjects, empty()); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, unknownOrcid); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testClientCredentialsTokenCache() throws Exception { + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock).getReadPublicAccessToken(); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock, times(1)).getReadPublicAccessToken(); + + dataProvider.setReadPublicAccessToken(null); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock, times(2)).getReadPublicAccessToken(); + + } + + @Test + public void testSearchPagination() throws Exception { + + List externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277904")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277871")))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, 5); + assertThat(externalObjects, hasSize(3)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277904")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + 
"::277902")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277871")))); + + verify(orcidClientMock, times(2)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock, times(2)).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902", "277871")); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 0, 2); + assertThat(externalObjects, hasSize(2)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277904")))); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + + verify(orcidClientMock, times(3)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getWorkBulk(ACCESS_TOKEN, ORCID, List.of("277904", "277902")); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 1, 1); + assertThat(externalObjects, hasSize(1)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277902")))); + + verify(orcidClientMock, times(4)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getObject(ACCESS_TOKEN, ORCID, "277902", Work.class); + + externalObjects = dataProvider.searchExternalDataObjects(ORCID, 2, 1); + assertThat(externalObjects, hasSize(1)); + assertThat(externalObjects, has((externalObject -> externalObject.getId().equals(ORCID + "::277871")))); + + verify(orcidClientMock, times(5)).getWorks(ACCESS_TOKEN, ORCID); + verify(orcidClientMock).getObject(ACCESS_TOKEN, ORCID, "277871", Work.class); + + verifyNoMoreInteractions(orcidClientMock); + + } + + @Test + public void testGetExternalDataObject() { + Optional optional = dataProvider.getExternalDataObject(ORCID + "::277902"); + assertThat(optional.isPresent(), is(true)); + + ExternalDataObject externalDataObject = optional.get(); + assertThat(externalDataObject.getDisplayValue(), is("Another cautionary tale.")); + assertThat(externalDataObject.getValue(), is("Another cautionary tale.")); + 
assertThat(externalDataObject.getId(), is(ORCID + "::277902")); + assertThat(externalDataObject.getSource(), is("orcidWorks")); + + List metadata = externalDataObject.getMetadata(); + assertThat(metadata, hasSize(8)); + assertThat(metadata, has(metadata("dc.date.issued", "2011-05-01"))); + assertThat(metadata, has(metadata("dc.description.abstract", "Short description"))); + assertThat(metadata, has(metadata("dc.relation.ispartof", "Journal title"))); + assertThat(metadata, has(metadata("dc.contributor.author", "Walter White"))); + assertThat(metadata, has(metadata("dc.contributor.author", "John White"))); + assertThat(metadata, has(metadata("dc.contributor.editor", "Jesse Pinkman"))); + assertThat(metadata, has(metadata("dc.title", "Another cautionary tale."))); + assertThat(metadata, has(metadata("dc.type", "Article"))); + + verify(orcidClientMock).getReadPublicAccessToken(); + verify(orcidClientMock).getObject(ACCESS_TOKEN, ORCID, "277902", Work.class); + verifyNoMoreInteractions(orcidClientMock); + } + + @Test + public void testGetExternalDataObjectWithInvalidOrcidId() { + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> dataProvider.getExternalDataObject("invalid::277902")); + + assertThat(exception.getMessage(), is("The given ORCID ID is not valid: invalid" )); + } + + @Test + public void testGetExternalDataObjectWithInvalidId() { + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> dataProvider.getExternalDataObject("id")); + + assertThat(exception.getMessage(), is("Invalid identifier 'id', expected ::")); + } + + @Test + public void testSearchWithoutApiKeysConfigured() throws Exception { + + context.turnOffAuthorisationSystem(); + + orcidConfiguration.setClientSecret(null); + + ItemBuilder.createItem(context, persons) + .withTitle("Profile") + .withOrcidIdentifier(ORCID) + .build(); + + context.restoreAuthSystemState(); + + List externalObjects = 
dataProvider.searchExternalDataObjects(ORCID, 0, -1); + assertThat(externalObjects, hasSize(3)); + + verify(orcidClientMock).getWorks(ORCID); + verify(orcidClientMock).getWorkBulk(ORCID, List.of("277904", "277902", "277871")); + verifyNoMoreInteractions(orcidClientMock); + } + + private Predicate metadata(String metadataField, String value) { + MetadataFieldName metadataFieldName = new MetadataFieldName(metadataField); + return metadata(metadataFieldName.schema, metadataFieldName.element, metadataFieldName.qualifier, value); + } + + private Predicate metadata(String schema, String element, String qualifier, String value) { + return dto -> StringUtils.equals(schema, dto.getSchema()) + && StringUtils.equals(element, dto.getElement()) + && StringUtils.equals(qualifier, dto.getQualifier()) + && StringUtils.equals(value, dto.getValue()); + } + + private OrcidTokenResponseDTO buildTokenResponse(String accessToken) { + OrcidTokenResponseDTO response = new OrcidTokenResponseDTO(); + response.setAccessToken(accessToken); + return response; + } + + private WorkBulk unmarshallWorkBulk(List putCodes) throws Exception { + return unmarshall("workBulk-" + String.join("-", putCodes) + ".xml", WorkBulk.class); + } + + @SuppressWarnings("unchecked") + private T unmarshall(String fileName, Class clazz) throws Exception { + JAXBContext jaxbContext = JAXBContext.newInstance(clazz); + Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); + URL resource = getClass().getClassLoader().getResource(BASE_XML_DIR_PATH + fileName); + if (resource == null) { + throw new IllegalStateException("No resource found named " + BASE_XML_DIR_PATH + fileName); + } + return (T) unmarshaller.unmarshal(new File(resource.getFile())); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilderTest.java b/dspace-api/src/test/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilderTest.java new file mode 100644 index 000000000000..1a5d0b4eb3fd 
--- /dev/null +++ b/dspace-api/src/test/java/org/dspace/google/client/GoogleAnalytics4ClientRequestBuilderTest.java @@ -0,0 +1,220 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static java.util.List.of; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; +import org.dspace.services.ConfigurationService; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for {@link GoogleAnalytics4ClientRequestBuilder}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GoogleAnalytics4ClientRequestBuilderTest { + + private GoogleAnalytics4ClientRequestBuilder requestBuilder; + + private ConfigurationService configurationService = mock(ConfigurationService.class); + + @Before + public void setup() { + requestBuilder = new GoogleAnalytics4ClientRequestBuilder("https://google-analytics/test"); + requestBuilder.setConfigurationService(configurationService); + } + + @Test + public void testGetEndpointUrl() { + + when(configurationService.getProperty("google.analytics.api-secret")).thenReturn("abc123"); + + String endpointUrl = requestBuilder.getEndpointUrl("G-12345"); + assertThat(endpointUrl, is("https://google-analytics/test?api_secret=abc123&measurement_id=G-12345")); + + } + + @Test + public void testGetEndpointUrlWithNotSupportedKey() { + + assertThrows("Only keys with G- prefix are supported", + IllegalArgumentException.class, () -> requestBuilder.getEndpointUrl("UA-12345")); + + } + + @Test + public void testGetEndpointUrlWithoutApiSecretConfigured() { + + assertThrows("The API secret must be configured to sent GA4 events", + GoogleAnalyticsClientException.class, () -> requestBuilder.getEndpointUrl("G-12345")); + + } + + @Test + public void testComposeRequestBodiesWithoutEvents() { + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", List.of()); + assertThat(requestsBody, empty()); + + } + + @Test + public void testComposeRequestBodiesWithSingleEvent() { + + GoogleAnalyticsEvent event = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", List.of(event)); + assertThat(requestsBody, hasSize(1)); + + JSONObject requestBody = new JSONObject(requestsBody.get(0)); + assertThat(requestBody.get("client_id"), is("123")); + + JSONArray eventsArray = requestBody.getJSONArray("events"); + assertThat(eventsArray.length(), 
is(1)); + + assertEventJsonHasAttributes(eventsArray.getJSONObject(0), "item", "download", "bitstream", "192.168.1.25", + "Chrome", "REF", "/api/documents/123", "Test publication"); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithSameClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test publication 2"); + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", List.of(event1, event2)); + assertThat(requestsBody, hasSize(1)); + + JSONObject requestBody = new JSONObject(requestsBody.get(0)); + assertThat(requestBody.get("client_id"), is("123")); + + JSONArray eventsArray = requestBody.getJSONArray("events"); + assertThat(eventsArray.length(), is(2)); + + JSONObject eventJson1 = findEventJsonByDocumentTitle(eventsArray, "Test publication"); + JSONObject eventJson2 = findEventJsonByDocumentTitle(eventsArray, "Test publication 2"); + + assertThat(eventJson1, notNullValue()); + assertThat(eventJson2, notNullValue()); + + assertEventJsonHasAttributes(eventJson1, "item", "download", "bitstream", "192.168.1.25", + "Chrome", "REF", "/api/documents/123", "Test publication"); + + assertEventJsonHasAttributes(eventJson2, "item", "download", "bitstream", "192.168.1.25", + "Mozilla Firefox", "REF-2", "/api/documents/12345", "Test publication 2"); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithDifferentClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test publication 2"); + + GoogleAnalyticsEvent event3 = buildEvent("987", "192.168.1.13", "Postman", null, + "/api/documents/654", "Test 
publication 3"); + + List requestsBody = requestBuilder.composeRequestsBody("G-12345", of(event1, event2, event3)); + assertThat(requestsBody, hasSize(2)); + + JSONObject firstRequestBody = findRequestBodyByClientId(requestsBody, "123"); + assertThat(firstRequestBody.get("client_id"), is("123")); + + JSONArray firstEventsArray = firstRequestBody.getJSONArray("events"); + assertThat(firstEventsArray.length(), is(2)); + + JSONObject eventJson1 = findEventJsonByDocumentTitle(firstEventsArray, "Test publication"); + JSONObject eventJson2 = findEventJsonByDocumentTitle(firstEventsArray, "Test publication 2"); + + assertThat(eventJson1, notNullValue()); + assertThat(eventJson2, notNullValue()); + + assertEventJsonHasAttributes(eventJson1, "item", "download", "bitstream", "192.168.1.25", + "Chrome", "REF", "/api/documents/123", "Test publication"); + + assertEventJsonHasAttributes(eventJson2, "item", "download", "bitstream", "192.168.1.25", + "Mozilla Firefox", "REF-2", "/api/documents/12345", "Test publication 2"); + + JSONObject secondRequestBody = findRequestBodyByClientId(requestsBody, "987"); + assertThat(secondRequestBody.get("client_id"), is("987")); + + JSONArray secondEventsArray = secondRequestBody.getJSONArray("events"); + assertThat(secondEventsArray.length(), is(1)); + + assertEventJsonHasAttributes(secondEventsArray.getJSONObject(0), "item", "download", "bitstream", + "192.168.1.13", "Postman", "", "/api/documents/654", "Test publication 3"); + + } + + private void assertEventJsonHasAttributes(JSONObject event, String name, String action, String category, + String userIp, String userAgent, String documentReferrer, String documentPath, String documentTitle) { + + assertThat(event.get("name"), is(name)); + assertThat(event.getJSONObject("params"), notNullValue()); + assertThat(event.getJSONObject("params").get("action"), is(action)); + assertThat(event.getJSONObject("params").get("category"), is(category)); + 
assertThat(event.getJSONObject("params").get("document_title"), is(documentTitle)); + assertThat(event.getJSONObject("params").get("user_ip"), is(userIp)); + assertThat(event.getJSONObject("params").get("user_agent"), is(userAgent)); + assertThat(event.getJSONObject("params").get("document_referrer"), is(documentReferrer)); + assertThat(event.getJSONObject("params").get("document_path"), is(documentPath)); + assertThat(event.getJSONObject("params").get("time"), notNullValue()); + + } + + private JSONObject findRequestBodyByClientId(List requestsBody, String clientId) { + for (String requestBody : requestsBody) { + JSONObject requestBodyJson = new JSONObject(requestBody); + if (requestBodyJson.get("client_id").equals(clientId)) { + return requestBodyJson; + } + } + return null; + } + + private JSONObject findEventJsonByDocumentTitle(JSONArray events, String documentTitle) { + + for (int i = 0; i < events.length(); i++) { + JSONObject event = events.getJSONObject(i); + assertThat(event.getJSONObject("params"), notNullValue()); + if (event.getJSONObject("params").get("document_title").equals(documentTitle)) { + return event; + } + } + + return null; + } + + private GoogleAnalyticsEvent buildEvent(String clientId, String userIp, String userAgent, + String documentReferrer, String documentPath, String documentTitle) { + return new GoogleAnalyticsEvent(clientId, userIp, userAgent, documentReferrer, documentPath, documentTitle); + } +} diff --git a/dspace-api/src/test/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilderTest.java b/dspace-api/src/test/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilderTest.java new file mode 100644 index 000000000000..bc30c2a124e5 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/google/client/UniversalAnalyticsClientRequestBuilderTest.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the 
source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.google.client; + +import static java.util.List.of; +import static org.apache.commons.lang.StringUtils.countMatches; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThrows; + +import java.util.List; + +import org.dspace.google.GoogleAnalyticsEvent; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit tests for {@link UniversalAnalyticsClientRequestBuilder}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class UniversalAnalyticsClientRequestBuilderTest { + + private UniversalAnalyticsClientRequestBuilder requestBuilder; + + @Before + public void setup() { + requestBuilder = new UniversalAnalyticsClientRequestBuilder("https://google-analytics/test"); + } + + @Test + public void testGetEndpointUrl() { + + String endpointUrl = requestBuilder.getEndpointUrl("UA-12345"); + assertThat(endpointUrl, is("https://google-analytics/test")); + + } + + @Test + public void testComposeRequestBodiesWithoutEvents() { + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", List.of()); + assertThat(requestsBody, empty()); + + } + + @Test + public void testComposeRequestBodiesWithNotSupportedKey() { + + GoogleAnalyticsEvent event = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + assertThrows("Only keys with G- prefix are supported", + IllegalArgumentException.class, () -> requestBuilder.composeRequestsBody("G-12345", List.of(event))); + + } + + @Test + public void testComposeRequestBodiesWithSingleEvent() { + + GoogleAnalyticsEvent event = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + List requestsBody = 
requestBuilder.composeRequestsBody("UA-12345", List.of(event)); + assertThat(requestsBody, hasSize(1)); + + String requestBody = requestsBody.get(0); + assertThat(countMatches(requestBody, "&qt="), is(1)); + + String requestBodyWithoutTime = removeAllTimeSections(requestBody); + + String expectedRequestBodyWithoutTime = "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Chrome&dr=REF" + + "&dp=%2Fapi%2Fdocuments%2F123&dt=Test+publication&ec=bitstream&ea=download&el=item"; + + assertThat(requestBodyWithoutTime, is(expectedRequestBodyWithoutTime)); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithSameClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", "REF-2", + "/api/documents/12345", "Test publication 2"); + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", List.of(event1, event2)); + assertThat(requestsBody, hasSize(1)); + String requestBody = requestsBody.get(0); + + assertThat(countMatches(requestBody, "&qt="), is(2)); + + String requestBodyWithoutTime = removeAllTimeSections(requestBody); + + String expectedRequestBodyWithoutTime = "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Chrome&dr=REF" + + "&dp=%2Fapi%2Fdocuments%2F123&dt=Test+publication&ec=bitstream&ea=download&el=item\n" + + "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Mozilla+Firefox&dr=REF-2" + + "&dp=%2Fapi%2Fdocuments%2F12345&dt=Test+publication+2&ec=bitstream&ea=download&el=item"; + + assertThat(requestBodyWithoutTime, is(expectedRequestBodyWithoutTime)); + + } + + @Test + public void testComposeRequestBodiesWithManyEventsWithDifferentClientId() { + + GoogleAnalyticsEvent event1 = buildEvent("123", "192.168.1.25", "Chrome", "REF", + "/api/documents/123", "Test publication"); + + GoogleAnalyticsEvent event2 = buildEvent("123", "192.168.1.25", "Mozilla Firefox", 
"REF-2", + "/api/documents/12345", "Test publication 2"); + + GoogleAnalyticsEvent event3 = buildEvent("987", "192.168.1.13", "Postman", null, + "/api/documents/654", "Test publication 3"); + + List requestsBody = requestBuilder.composeRequestsBody("UA-12345", of(event1, event2, event3)); + assertThat(requestsBody, hasSize(1)); + String requestBody = requestsBody.get(0); + + assertThat(countMatches(requestBody, "&qt="), is(3)); + + String requestBodyWithoutTime = removeAllTimeSections(requestBody); + + String expectedRequestBodyWithoutTime = "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Chrome&dr=REF" + + "&dp=%2Fapi%2Fdocuments%2F123&dt=Test+publication&ec=bitstream&ea=download&el=item\n" + + "v=1&tid=UA-12345&cid=123&t=event&uip=192.168.1.25&ua=Mozilla+Firefox&dr=REF-2" + + "&dp=%2Fapi%2Fdocuments%2F12345&dt=Test+publication+2&ec=bitstream&ea=download&el=item\n" + + "v=1&tid=UA-12345&cid=987&t=event&uip=192.168.1.13&ua=Postman&dr=" + + "&dp=%2Fapi%2Fdocuments%2F654&dt=Test+publication+3&ec=bitstream&ea=download&el=item"; + + assertThat(requestBodyWithoutTime, is(expectedRequestBodyWithoutTime)); + + } + + private String removeAllTimeSections(String requestBody) { + return requestBody.replaceAll("&qt=\\d+", ""); + } + + private GoogleAnalyticsEvent buildEvent(String clientId, String userIp, String userAgent, + String documentReferrer, String documentPath, String documentTitle) { + return new GoogleAnalyticsEvent(clientId, userIp, userAgent, documentReferrer, documentPath, documentTitle); + } +} diff --git a/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java b/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java index d79ba60450ef..09387acd3ee3 100644 --- a/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java +++ b/dspace-api/src/test/java/org/dspace/identifier/DOIIdentifierProviderTest.java @@ -9,7 +9,9 @@ import static org.junit.Assert.assertEquals; import static 
org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.junit.Assume.assumeNotNull; @@ -34,12 +36,16 @@ import org.dspace.content.MetadataValue; import org.dspace.content.WorkspaceItem; import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.logic.DefaultFilter; +import org.dspace.content.logic.LogicalStatement; +import org.dspace.content.logic.TrueFilter; import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; import org.dspace.identifier.doi.DOIConnector; import org.dspace.identifier.doi.DOIIdentifierException; +import org.dspace.identifier.doi.DOIIdentifierNotApplicableException; import org.dspace.identifier.factory.IdentifierServiceFactory; import org.dspace.identifier.service.DOIService; import org.dspace.services.ConfigurationService; @@ -125,6 +131,7 @@ public void init() { provider.itemService = itemService; provider.setConfigurationService(config); provider.setDOIConnector(connector); + provider.setFilter(null); } catch (AuthorizeException ex) { log.error("Authorization Error in init", ex); fail("Authorization Error in init: " + ex.getMessage()); @@ -187,7 +194,7 @@ private Item newItem() List remainder = new ArrayList<>(); for (MetadataValue id : metadata) { - if (!id.getValue().startsWith(DOI.RESOLVER)) { + if (!id.getValue().startsWith(doiService.getResolver())) { remainder.add(id.getValue()); } } @@ -274,11 +281,11 @@ public void testSupports_valid_String() { PREFIX + "/" + NAMESPACE_SEPARATOR + "lkjljasd1234", DOI.SCHEME + "10.5072/123abc-lkj/kljl", "http://dx.doi.org/10.5072/123abc-lkj/kljl", - DOI.RESOLVER + "/10.5072/123abc-lkj/kljl" + doiService.getResolver() + 
"/10.5072/123abc-lkj/kljl" }; for (String doi : validDOIs) { - assertTrue("DOI should be supported", provider.supports(doi)); + assertTrue("DOI " + doi + " should be supported", provider.supports(doi)); } } @@ -499,7 +506,7 @@ public void testMintDOI() Item item = newItem(); String doi = null; try { - // get a DOI: + // get a DOI (skipping any filters) doi = provider.mint(context, item); } catch (IdentifierException e) { e.printStackTrace(System.err); @@ -530,6 +537,74 @@ public void testMint_returns_existing_DOI() assertEquals("Mint did not returned an existing DOI!", doi, retrievedDOI); } + /** + * Test minting a DOI with a filter that always returns false and therefore never mints the DOI + */ + @Test + public void testMint_DOI_withNonMatchingFilter() + throws SQLException, AuthorizeException, IOException, IllegalAccessException, IdentifierException, + WorkflowException { + Item item = newItem(); + boolean wasFiltered = false; + try { + // Mint this with the filter + DefaultFilter doiFilter = new DefaultFilter(); + LogicalStatement alwaysFalse = (context, i) -> false; + doiFilter.setStatement(alwaysFalse); + // get a DOI with the method that applies filters by default + provider.mint(context, item, doiFilter); + } catch (DOIIdentifierNotApplicableException e) { + // This is what we wanted to see - we can return safely + wasFiltered = true; + } catch (IdentifierException e) { + e.printStackTrace(); + fail("Got an IdentifierException: " + e.getMessage()); + } + // Fail the test if the filter didn't throw a "not applicable" exception + assertTrue("DOI minting attempt was not filtered by filter service", wasFiltered); + } + + /** + * Test minting a DOI with a filter that always returns true and therefore allows the DOI to be minted + * (this should have hte same results as base testMint_DOI, but here we use an explicit filter rather than null) + */ + @Test + public void testMint_DOI_withMatchingFilter() + throws SQLException, AuthorizeException, IOException, 
IllegalAccessException, IdentifierException, + WorkflowException { + Item item = newItem(); + String doi = null; + boolean wasFiltered = false; + try { + // Temporarily set the provider to have a filter that always returns true for an item + // (therefore, the item is allowed to have a DOI minted) + DefaultFilter doiFilter = new DefaultFilter(); + LogicalStatement alwaysTrue = (context, i) -> true; + doiFilter.setStatement(alwaysTrue); + // get a DOI with the method that applies filters by default + doi = provider.mint(context, item, doiFilter); + } catch (DOIIdentifierNotApplicableException e) { + // This is what we wanted to see - we can return safely + wasFiltered = true; + } catch (IdentifierException e) { + e.printStackTrace(); + fail("Got an IdentifierException: " + e.getMessage()); + } + // If the attempt was filtered, fail + assertFalse("DOI minting attempt was incorrectly filtered by filter service", wasFiltered); + + // Continue with regular minting tests + assertNotNull("Minted DOI is null!", doi); + assertFalse("Minted DOI is empty!", doi.isEmpty()); + try { + doiService.formatIdentifier(doi); + } catch (Exception e) { + e.printStackTrace(); + fail("Minted an unrecognizable DOI: " + e.getMessage()); + } + } + + @Test public void testReserve_DOI() throws SQLException, SQLException, AuthorizeException, IOException, @@ -584,7 +659,10 @@ public void testCreate_and_Register_DOI() IdentifierException, WorkflowException, IllegalAccessException { Item item = newItem(); - String doi = provider.register(context, item); + // Register, skipping the filter + String doi = provider.register(context, item, + DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName( + "always_true_filter", TrueFilter.class)); // we want the created DOI to be returned in the following format: // doi:10./. 
@@ -682,6 +760,104 @@ public void testDelete_all_DOIs() DOIIdentifierProvider.TO_BE_DELETED.equals(doiRow2.getStatus())); } + @Test + public void testUpdateMetadataSkippedForPending() + throws SQLException, AuthorizeException, IOException, IdentifierException, IllegalAccessException, + WorkflowException { + context.turnOffAuthorisationSystem(); + Item item = newItem(); + // Mint a new DOI with PENDING status + String doi1 = this.createDOI(item, DOIIdentifierProvider.PENDING, true); + // Update metadata for the item. + // This would normally shift status to UPDATE_REGISTERED, UPDATE_BEFORE_REGISTERING or UPDATE_RESERVED. + // But if the DOI is just pending, it should return without changing anything. + provider.updateMetadata(context, item, doi1); + // Get the DOI from the service + DOI doi = doiService.findDOIByDSpaceObject(context, item); + // Ensure it is still PENDING + assertEquals("Status of updated DOI did not remain PENDING", + DOIIdentifierProvider.PENDING, doi.getStatus()); + context.restoreAuthSystemState(); + } + + + @Test + public void testMintDoiAfterOrphanedPendingDOI() + throws SQLException, AuthorizeException, IOException, IdentifierException, IllegalAccessException, + WorkflowException { + context.turnOffAuthorisationSystem(); + Item item1 = newItem(); + // Mint a new DOI with PENDING status + String doi1 = this.createDOI(item1, DOIIdentifierProvider.PENDING, true); + // remove the item + itemService.delete(context, item1); + // Get the DOI from the service + DOI doi = doiService.findDOIByDSpaceObject(context, item1); + // ensure DOI has no state + assertNull("Orphaned DOI was not set deleted", doi); + // create a new item and a new DOI + Item item2 = newItem(); + String doi2 = null; + try { + // get a DOI (skipping any filters) + doi2 = provider.mint(context, item2); + } catch (IdentifierException e) { + e.printStackTrace(System.err); + fail("Got an IdentifierException: " + e.getMessage()); + } + + assertNotNull("Minted DOI is null?!", doi2); + 
assertFalse("Minted DOI is empty!", doi2.isEmpty()); + assertNotEquals("Minted DOI equals previously orphaned DOI.", doi1, doi2); + + try { + doiService.formatIdentifier(doi2); + } catch (DOIIdentifierException e) { + e.printStackTrace(System.err); + fail("Minted an unrecognizable DOI: " + e.getMessage()); + } + + context.restoreAuthSystemState(); + } + + @Test + public void testUpdateMetadataSkippedForMinted() + throws SQLException, AuthorizeException, IOException, IdentifierException, IllegalAccessException, + WorkflowException { + context.turnOffAuthorisationSystem(); + Item item = newItem(); + // Mint a new DOI with MINTED status + String doi1 = this.createDOI(item, DOIIdentifierProvider.MINTED, true); + // Update metadata for the item. + // This would normally shift status to UPDATE_REGISTERED, UPDATE_BEFORE_REGISTERING or UPDATE_RESERVED. + // But if the DOI is just minted, it should return without changing anything. + provider.updateMetadata(context, item, doi1); + // Get the DOI from the service + DOI doi = doiService.findDOIByDSpaceObject(context, item); + // Ensure it is still MINTED + assertEquals("Status of updated DOI did not remain PENDING", + DOIIdentifierProvider.MINTED, doi.getStatus()); + context.restoreAuthSystemState(); + } + + @Test + public void testLoadOrCreateDOIReturnsMintedStatus() + throws SQLException, AuthorizeException, IOException, IdentifierException, IllegalAccessException, + WorkflowException { + Item item = newItem(); + // Mint a DOI without an explicit reserve or register context + String mintedDoi = provider.mint(context, item, DSpaceServicesFactory.getInstance() + .getServiceManager().getServiceByName("always_true_filter", TrueFilter.class)); + DOI doi = doiService.findByDoi(context, mintedDoi.substring(DOI.SCHEME.length())); + // This should be minted + assertEquals("DOI is not of 'minted' status", DOIIdentifierProvider.MINTED, doi.getStatus()); + provider.updateMetadata(context, item, mintedDoi); + DOI secondFind = 
doiService.findByDoi(context, mintedDoi.substring(DOI.SCHEME.length())); + // After an update, this should still be minted + assertEquals("DOI is not of 'minted' status", + DOIIdentifierProvider.MINTED, secondFind.getStatus()); + + } // test the following methods using the MockDOIConnector. // updateMetadataOnline diff --git a/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java new file mode 100644 index 000000000000..7e549f6cae33 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/identifier/VersionedHandleIdentifierProviderIT.java @@ -0,0 +1,115 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.identifier; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.VersionBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.kernel.ServiceManager; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.Before; +import org.junit.Test; + +public class VersionedHandleIdentifierProviderIT extends AbstractIntegrationTestWithDatabase { + private ServiceManager serviceManager; + private IdentifierServiceImpl identifierService; + + private String firstHandle; + + private Collection collection; + private Item itemV1; + private Item itemV2; + private Item itemV3; + + @Before + @Override + public void setUp() 
throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + + serviceManager = DSpaceServicesFactory.getInstance().getServiceManager(); + identifierService = serviceManager.getServicesByType(IdentifierServiceImpl.class).get(0); + // Clean out providers to avoid any being used for creation of community and collection + identifierService.setProviders(new ArrayList<>()); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + } + + private void registerProvider(Class type) { + // Register our new provider + serviceManager.registerServiceClass(type.getName(), type); + IdentifierProvider identifierProvider = + (IdentifierProvider) serviceManager.getServiceByName(type.getName(), type); + + // Overwrite the identifier-service's providers with the new one to ensure only this provider is used + identifierService.setProviders(List.of(identifierProvider)); + } + + private void createVersions() throws SQLException, AuthorizeException { + itemV1 = ItemBuilder.createItem(context, collection) + .withTitle("First version") + .build(); + firstHandle = itemV1.getHandle(); + itemV2 = VersionBuilder.createVersion(context, itemV1, "Second version").build().getItem(); + itemV3 = VersionBuilder.createVersion(context, itemV1, "Third version").build().getItem(); + } + + @Test + public void testDefaultVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProvider.class); + createVersions(); + + // Confirm the original item only has its original handle + assertEquals(firstHandle, itemV1.getHandle()); + assertEquals(1, itemV1.getHandles().size()); + // Confirm the second item has the correct version handle + assertEquals(firstHandle + ".2", itemV2.getHandle()); + assertEquals(1, itemV2.getHandles().size()); + // Confirm the last item has the correct version handle + 
assertEquals(firstHandle + ".3", itemV3.getHandle()); + assertEquals(1, itemV3.getHandles().size()); + } + + @Test + public void testCanonicalVersionedHandleProvider() throws Exception { + registerProvider(VersionedHandleIdentifierProviderWithCanonicalHandles.class); + createVersions(); + + // Confirm the original item only has a version handle + assertEquals(firstHandle + ".1", itemV1.getHandle()); + assertEquals(1, itemV1.getHandles().size()); + // Confirm the second item has the correct version handle + assertEquals(firstHandle + ".2", itemV2.getHandle()); + assertEquals(1, itemV2.getHandles().size()); + // Confirm the last item has both the correct version handle and the original handle + assertEquals(firstHandle, itemV3.getHandle()); + assertEquals(2, itemV3.getHandles().size()); + containsHandle(itemV3, firstHandle + ".3"); + } + + private void containsHandle(Item item, String handle) { + assertTrue(item.getHandles().stream().anyMatch(h -> handle.equals(h.getHandle()))); + } +} diff --git a/dspace-api/src/test/java/org/dspace/iiif/MockIIIFApiQueryServiceImpl.java b/dspace-api/src/test/java/org/dspace/iiif/MockIIIFApiQueryServiceImpl.java new file mode 100644 index 000000000000..a240e76f9792 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/iiif/MockIIIFApiQueryServiceImpl.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif; + +import org.dspace.content.Bitstream; + +/** + * Mock for the IIIFApiQueryService. 
+ * @author Michael Spalti (mspalti at willamette.edu) + */ +public class MockIIIFApiQueryServiceImpl extends IIIFApiQueryServiceImpl { + public int[] getImageDimensions(Bitstream bitstream) { + return new int[]{64, 64}; + } +} diff --git a/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java new file mode 100644 index 000000000000..7dba38c987b7 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/iiif/canvasdimension/CanvasDimensionsIT.java @@ -0,0 +1,600 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.iiif.canvasdimension; + +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintStream; +import java.util.regex.Pattern; + +import org.apache.commons.lang.StringUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class CanvasDimensionsIT extends AbstractIntegrationTestWithDatabase { + + protected Community child1; + protected Community child2; + protected Collection col1; + protected Collection col2; + protected Collection col3; + protected Item iiifItem; + protected Item iiifItem2; + protected Item iiifItem3; + protected Bitstream bitstream; + protected Bitstream bitstream2; + + private final static String METADATA_IIIF_HEIGHT = 
"iiif.image.height"; + private final static String METADATA_IIIF_WIDTH = "iiif.image.width"; + + private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); + private final PrintStream originalOut = System.out; + + @Before + public void setup() throws IOException { + + System.setOut(new PrintStream(outContent)); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community 1") + .build(); + child2 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community 2") + .build(); + + col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + col2 = CollectionBuilder.createCollection(context, child2).withName("Collection 2").build(); + + context.restoreAuthSystemState(); + } + + @After + @Override + public void destroy() throws Exception { + System.setOut(originalOut); + super.destroy(); + } + + @Test + public void processItemNoForce() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + context.restoreAuthSystemState(); + + String handle = iiifItem.getHandle(); + execCanvasScript(handle); + // The test image is small so the canvas dimension should be doubled, e.g. 
height 200 -> height 400 + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processCollectionNoForce() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + context.restoreAuthSystemState(); + + String id = col1.getID().toString(); + execCanvasScript(id); + // The test image is small so the canvas dimension should be doubled, e.g. 
height 200 -> height 400 + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processSubCommunityNoForce() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + context.restoreAuthSystemState(); + + String id = child1.getID().toString(); + execCanvasScript(id); + + // The test image is small so the canvas dimension should be doubled, e.g. 
height 200 -> height 400 + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processParentCommunityNoForce() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + context.restoreAuthSystemState(); + + String handle = parentCommunity.getHandle(); + execCanvasScript(handle); + // The test image is small so the canvas dimension should be doubled, e.g. 
height 200 -> height 400 + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processParentCommunityMultipleSubsNoForce() throws Exception { + context.turnOffAuthorisationSystem(); + // Create new Items + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + iiifItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test Item2") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstreams (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + + input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream2 = BitstreamBuilder + .createBitstream(context, iiifItem2, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .build(); + context.restoreAuthSystemState(); + String id = parentCommunity.getID().toString(); + execCanvasScript(id); + + // All bitstreams should be updated with canvas metadata. 
+ assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + assertTrue(bitstream2.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream2.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processItemWithForce() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + context.restoreAuthSystemState(); + + String id = iiifItem.getID().toString(); + execCanvasScriptForceOption(id); + + // The existing metadata should be updated + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processCollectionWithForce() throws Exception { + context.turnOffAuthorisationSystem(); + // 
Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + context.restoreAuthSystemState(); + + String id = col1.getID().toString(); + execCanvasScriptForceOption(id); + + // The existing metadata should be updated + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + + } + + @Test + public void processItemWithExistingMetadata() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + context.restoreAuthSystemState(); + + String handle = iiifItem.getHandle(); + execCanvasScript(handle); + // The existing canvas metadata should be unchanged + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + assertTrue(bitstream.getMetadata().stream() 
+ .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + + } + + + @Test + public void processItemWithJp2File() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jp2 image to verify image server call for dimensions + InputStream input = this.getClass().getResourceAsStream("cat.jp2"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jp2") + .withMimeType("image/jp2") + .build(); + + context.restoreAuthSystemState(); + + String id = iiifItem.getID().toString(); + + execCanvasScript(id); + + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("64"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("64"))); + + } + + @Test + public void processParentCommunityWithMaximum() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item 1") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + // Second item + iiifItem2 = ItemBuilder.createItem(context, col1) + .withTitle("Test Item 2") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + // Third item so we can test max2process + iiifItem3 = ItemBuilder.createItem(context, col1) + .withTitle("Test Item3") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + 
.withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream2 = BitstreamBuilder + .createBitstream(context, iiifItem2, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + input = this.getClass().getResourceAsStream("cat.jpg"); + Bitstream bitstream3 = BitstreamBuilder + .createBitstream(context, iiifItem3, input) + .withName("Bitstream3.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + + context.restoreAuthSystemState(); + + String id = parentCommunity.getID().toString(); + + execCanvasScriptWithMaxRecs(id); + // check System.out for number of items processed. + Pattern regex = Pattern.compile(".*2 IIIF items were processed", Pattern.DOTALL); + assertTrue(regex.matcher(StringUtils.chomp(outContent.toString())).find()); + } + + @Test + public void processParentCommunityWithMultipleSkip() throws Exception { + context.turnOffAuthorisationSystem(); + col3 = CollectionBuilder.createCollection(context, child1).withName("Collection 3").build(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + // Second item so we can test max2process + iiifItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + iiifItem3 = ItemBuilder.createItem(context, col3) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + 
.withIIIFCanvasHeight(100) + .build(); + input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream2 = BitstreamBuilder + .createBitstream(context, iiifItem2, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + + input = this.getClass().getResourceAsStream("cat.jpg"); + Bitstream bitstream3 = BitstreamBuilder + .createBitstream(context, iiifItem3, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + + context.restoreAuthSystemState(); + + String id = parentCommunity.getID().toString(); + + execCanvasScriptWithSkipList(id,col2.getHandle() + "," + col3.getHandle()); + // The test image is small so the canvas dimension should be doubled, e.g. height 200 -> height 400 + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + // Second bitstream should be unchanged because it's within a skipped collection + assertTrue(bitstream2.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + assertTrue(bitstream2.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + // Third bitstream should be unchanged because it's within a skipped collection + assertTrue(bitstream3.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + assertTrue(bitstream3.getMetadata().stream() 
+ .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + + } + + @Test + public void processParentCommunityWithSingleSkip() throws Exception { + context.turnOffAuthorisationSystem(); + // Create a new Item + iiifItem = ItemBuilder.createItem(context, col1) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + // Second item so we can test max2process + iiifItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test Item") + .withIssueDate("2017-10-17") + .enableIIIF() + .build(); + + // Add jpeg image bitstream (300 x 200) + InputStream input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream = BitstreamBuilder + .createBitstream(context, iiifItem, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + input = this.getClass().getResourceAsStream("cat.jpg"); + bitstream2 = BitstreamBuilder + .createBitstream(context, iiifItem2, input) + .withName("Bitstream2.jpg") + .withMimeType("image/jpeg") + .withIIIFCanvasWidth(100) + .withIIIFCanvasHeight(100) + .build(); + + context.restoreAuthSystemState(); + + String id = parentCommunity.getID().toString(); + + execCanvasScriptWithSkipList(id, col2.getHandle()); + // The test image is small so the canvas dimension should be doubled, e.g. 
height 200 -> height 400 + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("400"))); + assertTrue(bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("600"))); + // Second bitstream should be unchanged because it's inside a skipped collection + assertTrue(bitstream2.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_HEIGHT)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + assertTrue(bitstream2.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_WIDTH)) + .anyMatch(m -> m.getValue().contentEquals("100"))); + + } + + private void execCanvasScript(String id) throws Exception { + runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id); + } + + private void execCanvasScriptForceOption(String id) throws Exception { + runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id, "-f"); + } + + private void execCanvasScriptWithMaxRecs(String id) throws Exception { + // maximum 2 + runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id, "-m", "2", "-f", "-q"); + } + + private void execCanvasScriptWithSkipList(String id, String skip) throws Exception { + runDSpaceScript("iiif-canvas-dimensions", "-e", "admin@email.com", "-i", id, "-s", skip, "-f"); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessorTest.java b/dspace-api/src/test/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessorTest.java new file mode 100644 index 000000000000..323856cd0a7d --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessorTest.java @@ -0,0 +1,38 @@ +/** + * The 
contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import static org.junit.Assert.assertEquals; + +import java.util.Collection; + +import org.junit.Test; + +/** + * + * @author mwood + */ +public class CrossRefDateMetadataProcessorTest { + /** + * Test of processMetadata method, of class CrossRefDateMetadataProcessor. + */ + @Test + public void testProcessMetadata() { + CrossRefDateMetadataProcessor unit = new CrossRefDateMetadataProcessor(); + unit.setPathToArray("/dates"); + Collection metadata = unit.processMetadata("{\"dates\": [" + + "[1957, 1, 27]," + + "[1957, 1]," + + "[1957]" + + "]}"); + String[] metadataValues = (String[]) metadata.toArray(new String[3]); + assertEquals("[yyyy, MM, dd] should parse", "1957-01-27", metadataValues[0]); + assertEquals("[yyyy, MM] should parse", "1957-01", metadataValues[1]); + assertEquals("[yyyy] should parse", "1957", metadataValues[2]); + } +} diff --git a/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java b/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java index bc687a43f5fa..30a5a3a9b51d 100644 --- a/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java +++ b/dspace-api/src/test/java/org/dspace/license/MockCCLicenseConnectorServiceImpl.java @@ -9,14 +9,14 @@ import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.HashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; -import org.jdom.Document; -import org.jdom.JDOMException; +import org.jdom2.Document; +import org.jdom2.JDOMException; /** * Mock implementation for the Creative commons license connector service. 
@@ -51,7 +51,7 @@ private CCLicense createMockLicense(int count, int[] amountOfFieldsAndEnums) { } private List createMockLicenseFields(int count, int[] amountOfFieldsAndEnums) { - List ccLicenseFields = new LinkedList<>(); + List ccLicenseFields = new ArrayList<>(amountOfFieldsAndEnums.length); for (int index = 0; index < amountOfFieldsAndEnums.length; index++) { String licenseFieldId = "license" + count + "-field" + index; String licenseFieldLabel = "License " + count + " - Field " + index + " - Label"; @@ -70,7 +70,7 @@ private List createMockLicenseFields(int count, int[] amountOfFi } private List createMockLicenseFields(int count, int index, int amountOfEnums) { - List ccLicenseFieldEnumList = new LinkedList<>(); + List ccLicenseFieldEnumList = new ArrayList<>(amountOfEnums); for (int i = 0; i < amountOfEnums; i++) { String enumId = "license" + count + "-field" + index + "-enum" + i; String enumLabel = "License " + count + " - Field " + index + " - Enum " + i + " - Label"; diff --git a/dspace-api/src/test/java/org/dspace/matcher/DateMatcher.java b/dspace-api/src/test/java/org/dspace/matcher/DateMatcher.java new file mode 100644 index 000000000000..c544bb74e62c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/matcher/DateMatcher.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.matcher; + +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Date; + +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; + +/** + * Hamcrest Matcher for comparing a Date with an ISO 8601 zoned string form + * of a date. + * + * @author Mark H. 
Wood + */ +public class DateMatcher + extends BaseMatcher { + private static final SimpleDateFormat dateFormat + = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX"); + + private final Date matchDate; + + /** + * Create a matcher for a given Date. + * @param matchDate The date that tested values should match. + */ + public DateMatcher(Date matchDate) { + this.matchDate = matchDate; + } + + @Override + public boolean matches(Object testDate) { + // null : null is a match + if (null == matchDate && null == testDate) { + return true; + } + + // Null matchDate never matches non-null testDate + if (null == matchDate) { + return false; + } + + // We only match strings here + if (!(testDate instanceof String)) { + throw new IllegalArgumentException("Argument not a String"); + } + + // Decode the string to a Date + Date testDateDecoded; + try { + testDateDecoded = dateFormat.parse((String)testDate); + } catch (ParseException ex) { + throw new IllegalArgumentException("Argument '" + testDate + + "' is not an ISO 8601 zoned date", ex); + } + + // Compare with the Date that must match + return matchDate.equals(testDateDecoded); + } + + @Override + public void describeTo(Description description) { + description.appendText("is the same date as "); + description.appendText(dateFormat.format(matchDate)); + } + + /** + * Return a Matcher for a given Date. + * @param matchDate the date which tested values should match. + * @return a new Matcher for matchDate. 
+ */ + static public DateMatcher dateMatcher(Date matchDate) { + return new DateMatcher(matchDate); + } +} diff --git a/dspace-api/src/test/java/org/dspace/matcher/QAEventMatcher.java b/dspace-api/src/test/java/org/dspace/matcher/QAEventMatcher.java new file mode 100644 index 000000000000..52f3704a74b7 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/matcher/QAEventMatcher.java @@ -0,0 +1,117 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.matcher; + +import static org.dspace.content.QAEvent.OPENAIRE_SOURCE; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +import org.dspace.content.Item; +import org.dspace.content.QAEvent; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link org.hamcrest.Matcher} to match a QAEvent by all its + * attributes. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class QAEventMatcher extends TypeSafeMatcher { + + private Matcher eventIdMatcher; + + private Matcher originalIdMatcher; + + private Matcher relatedMatcher; + + private Matcher sourceMatcher; + + private Matcher statusMatcher; + + private Matcher targetMatcher; + + private Matcher titleMatcher; + + private Matcher messageMatcher; + + private Matcher topicMatcher; + + private Matcher trustMatcher; + + private QAEventMatcher(Matcher eventIdMatcher, Matcher originalIdMatcher, + Matcher relatedMatcher, Matcher sourceMatcher, Matcher statusMatcher, + Matcher targetMatcher, Matcher titleMatcher, Matcher messageMatcher, + Matcher topicMatcher, Matcher trustMatcher) { + this.eventIdMatcher = eventIdMatcher; + this.originalIdMatcher = originalIdMatcher; + this.relatedMatcher = relatedMatcher; + this.sourceMatcher = sourceMatcher; + this.statusMatcher = statusMatcher; + this.targetMatcher = targetMatcher; + this.titleMatcher = titleMatcher; + this.messageMatcher = messageMatcher; + this.topicMatcher = topicMatcher; + this.trustMatcher = trustMatcher; + } + + /** + * Creates an instance of {@link QAEventMatcher} that matches an OPENAIRE + * QAEvent with PENDING status, with an event id, without a related item and + * with the given attributes. 
+ * + * @param originalId the original id to match + * @param target the target to match + * @param title the title to match + * @param message the message to match + * @param topic the topic to match + * @param trust the trust to match + * @return the matcher instance + */ + public static QAEventMatcher pendingOpenaireEventWith(String originalId, Item target, + String title, String message, String topic, Double trust) { + + return new QAEventMatcher(notNullValue(String.class), is(originalId), nullValue(String.class), + is(OPENAIRE_SOURCE), is("PENDING"), is(target.getID().toString()), is(title), is(message), is(topic), + is(trust)); + + } + + @Override + public boolean matchesSafely(QAEvent event) { + return eventIdMatcher.matches(event.getEventId()) + && originalIdMatcher.matches(event.getOriginalId()) + && relatedMatcher.matches(event.getRelated()) + && sourceMatcher.matches(event.getSource()) + && statusMatcher.matches(event.getStatus()) + && targetMatcher.matches(event.getTarget()) + && titleMatcher.matches(event.getTitle()) + && messageMatcher.matches(event.getMessage()) + && topicMatcher.matches(event.getTopic()) + && trustMatcher.matches(event.getTrust()); + } + + @Override + public void describeTo(Description description) { + description.appendText("a QA event with the following attributes:") + .appendText(" event id ").appendDescriptionOf(eventIdMatcher) + .appendText(", original id ").appendDescriptionOf(originalIdMatcher) + .appendText(", related ").appendDescriptionOf(relatedMatcher) + .appendText(", source ").appendDescriptionOf(sourceMatcher) + .appendText(", status ").appendDescriptionOf(statusMatcher) + .appendText(", target ").appendDescriptionOf(targetMatcher) + .appendText(", title ").appendDescriptionOf(titleMatcher) + .appendText(", message ").appendDescriptionOf(messageMatcher) + .appendText(", topic ").appendDescriptionOf(topicMatcher) + .appendText(" and trust ").appendDescriptionOf(trustMatcher); + } + +} diff --git 
a/dspace-api/src/test/java/org/dspace/matcher/QASourceMatcher.java b/dspace-api/src/test/java/org/dspace/matcher/QASourceMatcher.java new file mode 100644 index 000000000000..fe3b7130b543 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/matcher/QASourceMatcher.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.matcher; + +import static org.hamcrest.Matchers.is; + +import org.dspace.qaevent.QASource; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link org.hamcrest.Matcher} to match a QASource by all its + * attributes. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class QASourceMatcher extends TypeSafeMatcher { + + private Matcher nameMatcher; + + private Matcher totalEventsMatcher; + + private QASourceMatcher(Matcher nameMatcher, Matcher totalEventsMatcher) { + this.nameMatcher = nameMatcher; + this.totalEventsMatcher = totalEventsMatcher; + } + + /** + * Creates an instance of {@link QASourceMatcher} that matches a QATopic with + * the given name and total events count. 
+ * @param name the name to match + * @param totalEvents the total events count to match + * @return the matcher instance + */ + public static QASourceMatcher with(String name, long totalEvents) { + return new QASourceMatcher(is(name), is(totalEvents)); + } + + @Override + public boolean matchesSafely(QASource event) { + return nameMatcher.matches(event.getName()) && totalEventsMatcher.matches(event.getTotalEvents()); + } + + @Override + public void describeTo(Description description) { + description.appendText("a QA source with the following attributes:") + .appendText(" name ").appendDescriptionOf(nameMatcher) + .appendText(" and total events ").appendDescriptionOf(totalEventsMatcher); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/matcher/QATopicMatcher.java b/dspace-api/src/test/java/org/dspace/matcher/QATopicMatcher.java new file mode 100644 index 000000000000..dd93972814a2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/matcher/QATopicMatcher.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.matcher; + +import static org.hamcrest.Matchers.is; + +import org.dspace.qaevent.QATopic; +import org.hamcrest.Description; +import org.hamcrest.Matcher; +import org.hamcrest.TypeSafeMatcher; + +/** + * Implementation of {@link org.hamcrest.Matcher} to match a QATopic by all its + * attributes. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class QATopicMatcher extends TypeSafeMatcher { + + private Matcher keyMatcher; + + private Matcher totalEventsMatcher; + + private QATopicMatcher(Matcher keyMatcher, Matcher totalEventsMatcher) { + this.keyMatcher = keyMatcher; + this.totalEventsMatcher = totalEventsMatcher; + } + + /** + * Creates an instance of {@link QATopicMatcher} that matches a QATopic with the + * given key and total events count. + * @param key the key to match + * @param totalEvents the total events count to match + * @return the matcher instance + */ + public static QATopicMatcher with(String key, long totalEvents) { + return new QATopicMatcher(is(key), is(totalEvents)); + } + + @Override + public boolean matchesSafely(QATopic event) { + return keyMatcher.matches(event.getKey()) && totalEventsMatcher.matches(event.getTotalEvents()); + } + + @Override + public void describeTo(Description description) { + description.appendText("a QA topic with the following attributes:") + .appendText(" key ").appendDescriptionOf(keyMatcher) + .appendText(" and total events ").appendDescriptionOf(totalEventsMatcher); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/matcher/SubscribeMatcher.java b/dspace-api/src/test/java/org/dspace/matcher/SubscribeMatcher.java new file mode 100644 index 000000000000..4671e65d3875 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/matcher/SubscribeMatcher.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ + +package org.dspace.matcher; + +import java.util.List; +import java.util.stream.Collectors; + +import org.dspace.content.DSpaceObject; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Subscription; +import org.dspace.eperson.SubscriptionParameter; +import 
org.hamcrest.BaseMatcher; +import org.hamcrest.Description; + +public class SubscribeMatcher extends BaseMatcher { + + private final DSpaceObject dso; + private final EPerson eperson; + private final List parameters; + private final String type; + + private SubscribeMatcher(DSpaceObject dso, EPerson eperson, String type, List parameters) { + this.dso = dso; + this.eperson = eperson; + this.parameters = parameters; + this.type = type; + } + + public static SubscribeMatcher matches(DSpaceObject dso, EPerson ePerson, String type, + List parameters) { + return new SubscribeMatcher(dso, ePerson, type, parameters); + } + + @Override + public boolean matches(Object subscription) { + Subscription s = (Subscription) subscription; + return s.getEPerson().equals(eperson) + && s.getDSpaceObject().equals(dso) + && s.getSubscriptionType().equals(type) + && checkParameters(s.getSubscriptionParameterList()); + } + + private Boolean checkParameters(List parameters) { + if (parameters.size() != this.parameters.size()) { + return false; + } + // FIXME: for check purpose we rely on name and value. 
Evaluate to extend or refactor this part + for (int i = 0; i < parameters.size(); i++) { + SubscriptionParameter parameter = parameters.get(i); + SubscriptionParameter match = this.parameters.get(i); + boolean differentName = !parameter.getName().equals((match.getName())); + if (differentName) { + return false; + } + boolean differentValue = !parameter.getValue().equals((match.getValue())); + if (differentValue) { + return false; + } + } + return true; + } + + @Override + public void describeTo(Description description) { + String subscription = String.format("Type: %s, eperson: %s, dso: %s, params: %s", + type, eperson.getID(), dso.getID(), parameters.stream() + .map(p -> "{ name: " + p.getName() + + ", value: " + p.getValue() + + "}") + .collect(Collectors.joining(", "))); + description.appendText("Subscription matching: " + subscription); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java new file mode 100644 index 000000000000..f2e528d78cd6 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/OrcidQueueConsumerIT.java @@ -0,0 +1,791 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid; + +import static org.dspace.app.matcher.OrcidQueueMatcher.matches; +import static org.dspace.builder.OrcidHistoryBuilder.createOrcidHistory; +import static org.dspace.builder.RelationshipTypeBuilder.createRelationshipTypeBuilder; +import static org.dspace.orcid.OrcidOperation.DELETE; +import static org.dspace.orcid.OrcidOperation.INSERT; +import static org.dspace.orcid.OrcidOperation.UPDATE; +import static org.dspace.orcid.model.OrcidProfileSectionType.KEYWORDS; +import static org.dspace.profile.OrcidEntitySyncPreference.ALL; +import static 
org.dspace.profile.OrcidEntitySyncPreference.DISABLED; +import static org.dspace.profile.OrcidProfileSyncPreference.BIOGRAPHICAL; +import static org.dspace.profile.OrcidProfileSyncPreference.IDENTIFIERS; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; + +import java.sql.SQLException; +import java.time.Instant; +import java.util.Date; +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.OrcidHistoryBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.RelationshipType; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.consumer.OrcidQueueConsumer; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link OrcidQueueConsumer}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidQueueConsumerIT extends AbstractIntegrationTestWithDatabase { + + private OrcidQueueService orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + + private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private Collection profileCollection; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent community") + .build(); + + profileCollection = createCollection("Profiles", "Person"); + + context.restoreAuthSystemState(); + } + + @After + @Override + public void destroy() throws Exception { + List records = orcidQueueService.findAll(context); + for (OrcidQueue record : records) { + orcidQueueService.delete(context, record); + } + context.setDispatcher(null); + + super.destroy(); + } + + @Test + public void testWithNotOrcidSynchronizationEntity() throws Exception { + + context.turnOffAuthorisationSystem(); + + Collection orgUnits = CollectionBuilder.createCollection(context, parentCommunity) + .withName("OrgUnits") + .withEntityType("OrgUnit") + .build(); + + ItemBuilder.createItem(context, orgUnits) + .withTitle("Test OrgUnit") + .withSubject("test") + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, empty()); + } + + @Test + public void testWithOrcidSynchronizationDisabled() throws Exception { + + configurationService.setProperty("orcid.synchronization-enabled", false); + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + 
.withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("test") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .withOrcidSynchronizationProfilePreference(IDENTIFIERS) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, empty()); + } + + @Test + public void testOrcidQueueRecordCreationForProfile() throws Exception { + // Set a fake handle prefix for this test which we will use to assign handles below + configurationService.setProperty("handle.prefix", "fake-handle"); + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("test") + .withHandle("fake-handle/190") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .withOrcidSynchronizationProfilePreference(IDENTIFIERS) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(2)); + assertThat(queueRecords, hasItem(matches(profile, profile, "KEYWORDS", null, + "dc.subject::test", "test", INSERT))); + assertThat(queueRecords, hasItem(matches(profile, "RESEARCHER_URLS", null, + "dc.identifier.uri::http://localhost:4000/handle/fake-handle/190", + "http://localhost:4000/handle/fake-handle/190", INSERT))); + + addMetadata(profile, "person", "name", "variant", "User Test", null); + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(3)); + assertThat(queueRecords, hasItem( + matches(profile, profile, "KEYWORDS", null, "dc.subject::test", "test", INSERT))); + assertThat(queueRecords, hasItem(matches(profile, "RESEARCHER_URLS", null, + 
"dc.identifier.uri::http://localhost:4000/handle/fake-handle/190", + "http://localhost:4000/handle/fake-handle/190", INSERT))); + assertThat(queueRecords, hasItem(matches(profile, profile, "OTHER_NAMES", + null, "person.name.variant::User Test", "User Test", INSERT))); + } + + @Test + public void testOrcidQueueRecordCreationForProfileWithSameMetadataPreviouslyDeleted() throws Exception { + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + .withTimestamp(Date.from(Instant.ofEpochMilli(100000))) + .withStatus(201) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.DELETE) + .withTimestamp(Date.from(Instant.ofEpochMilli(200000))) + .withStatus(204) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "person", "country", null, "IT", null); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "COUNTRY", null, "person.country::IT", "IT", INSERT)); + } + + @Test + public void testOrcidQueueRecordCreationForProfileWithMetadataPreviouslyDeletedAndThenInsertedAgain() + throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + 
.withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + .withTimestamp(Date.from(Instant.ofEpochMilli(100000))) + .withStatus(201) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.DELETE) + .withTimestamp(Date.from(Instant.ofEpochMilli(200000))) + .withStatus(204) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + .withTimestamp(Date.from(Instant.ofEpochMilli(300000))) + .withStatus(201) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "person", "country", null, "IT", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testOrcidQueueRecordCreationForProfileWithNotSuccessfullyMetadataDeletion() + throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.INSERT) + 
.withTimestamp(Date.from(Instant.ofEpochMilli(100000))) + .withStatus(201) + .build(); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType("COUNTRY") + .withMetadata("person.country::IT") + .withPutCode("123456") + .withOperation(OrcidOperation.DELETE) + .withTimestamp(Date.from(Instant.ofEpochMilli(200000))) + .withStatus(400) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "person", "country", null, "IT", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testOrcidQueueRecordCreationAndDeletion() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Science") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), null, "dc.subject::Science", "Science", INSERT))); + + removeMetadata(item, "dc", "subject", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testOrcidQueueRecordCreationAndDeletionWithOrcidHistoryInsertionInTheMiddle() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Science") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + 
context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), null, "dc.subject::Science", "Science", INSERT))); + + OrcidHistoryBuilder.createOrcidHistory(context, item, item) + .withPutCode("12345") + .withMetadata("dc.subject::Science") + .withDescription("Science") + .withRecordType(KEYWORDS.name()) + .withOperation(INSERT) + .withStatus(201) + .build(); + + removeMetadata(item, "dc", "subject", null); + context.commit(); + + records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), "12345", "dc.subject::Science", "Science", DELETE))); + + } + + @Test + public void testOrcidQueueRecordCreationAndDeletionWithFailedOrcidHistoryInsertionInTheMiddle() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Science") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(item, KEYWORDS.name(), null, "dc.subject::Science", "Science", INSERT))); + + OrcidHistoryBuilder.createOrcidHistory(context, item, item) + .withPutCode("12345") + .withMetadata("dc.subject::Science") + .withDescription("Science") + .withRecordType(KEYWORDS.name()) + .withOperation(INSERT) + .withStatus(400) + .build(); + + removeMetadata(item, "dc", "subject", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testNoOrcidQueueRecordCreationOccursIfProfileSynchronizationIsDisabled() throws SQLException { + 
context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testNoOrcidQueueRecordCreationOccursIfNoComplianceMetadataArePresent() throws SQLException { + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testOrcidQueueRecordCreationForPublication() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User") + .build(); + + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, personType, publicationType, + "isAuthorOfPublication", "isPublicationOfAuthor", 0, null, 0, null).build(); + + 
RelationshipBuilder.createRelationshipBuilder(context, profile, publication, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", INSERT)); + + addMetadata(publication, "dc", "contributor", "editor", "Editor", null); + context.commit(); + + List newOrcidQueueRecords = orcidQueueService.findAll(context); + assertThat(newOrcidQueueRecords, hasSize(1)); + + assertThat(orcidQueueRecords.get(0), equalTo(newOrcidQueueRecords.get(0))); + } + + @Test + public void testOrcidQueueRecordCreationToUpdatePublication() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationPublicationsPreference(ALL) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User") + .build(); + + createOrcidHistory(context, profile, publication) + .withPutCode("123456") + .withOperation(INSERT) + .build(); + + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, personType, publicationType, + "isAuthorOfPublication", "isPublicationOfAuthor", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, profile, publication, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + 
context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, publication, "Publication", "123456", UPDATE)); + } + + @Test + public void testNoOrcidQueueRecordCreationOccursIfPublicationSynchronizationIsDisabled() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .build(); + + Collection publicationCollection = createCollection("Publications", "Publication"); + + Item publication = ItemBuilder.createItem(context, publicationCollection) + .withTitle("Test publication") + .withAuthor("Test User") + .build(); + + EntityType publicationType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, personType, publicationType, + "isAuthorOfPublication", "isPublicationOfAuthor", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, profile, publication, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + addMetadata(profile, "dspace", "orcid", "sync-publications", DISABLED.name(), null); + addMetadata(publication, "dc", "date", "issued", "2021-01-01", null); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testOrcidQueueRecordCreationToUpdateProject() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + 
.withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationFundingsPreference(ALL) + .build(); + + Collection projectCollection = createCollection("Projects", "Project"); + + Item project = ItemBuilder.createItem(context, projectCollection) + .withTitle("Test project") + .build(); + + createOrcidHistory(context, profile, project) + .withPutCode("123456") + .build(); + + EntityType projectType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + + RelationshipType isProjectOfPerson = createRelationshipTypeBuilder(context, projectType, personType, + "isProjectOfPerson", "isPersonOfProject", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, project, profile, isProjectOfPerson).build(); + + context.restoreAuthSystemState(); + context.commit(); + + List orcidQueueRecords = orcidQueueService.findAll(context); + assertThat(orcidQueueRecords, hasSize(1)); + assertThat(orcidQueueRecords.get(0), matches(profile, project, "Project", "123456", UPDATE)); + } + + @Test + public void testNoOrcidQueueRecordCreationOccursForNotConfiguredEntities() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .build(); + + Collection projectCollection = createCollection("Projects", "Project"); + + Item project = ItemBuilder.createItem(context, projectCollection) + .withTitle("Test project") + .withProjectInvestigator("Test User") + .build(); + + EntityType projectType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType personType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build(); + 
+ RelationshipType isProjectOfPerson = createRelationshipTypeBuilder(context, projectType, personType, + "isProjectOfPerson", "isPersonOfProject", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, project, profile, isProjectOfPerson).build(); + + context.restoreAuthSystemState(); + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + } + + @Test + public void testOrcidQueueRecalculationOnProfilePreferenceUpdate() throws Exception { + // Set a fake handle prefix for this test which we will use to assign handles below + configurationService.setProperty("handle.prefix", "fake-handle"); + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-0000-0012-2345") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withSubject("Math") + .withHandle("fake-handle/200") + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List records = orcidQueueService.findAll(context); + assertThat(records, hasSize(1)); + assertThat(records, hasItem(matches(profile, "KEYWORDS", null, "dc.subject::Math", "Math", INSERT))); + + addMetadata(profile, "person", "identifier", "rid", "ID", null); + addMetadata(profile, "dspace", "orcid", "sync-profile", IDENTIFIERS.name(), null); + + context.commit(); + + records = orcidQueueService.findAll(context); + assertThat(records, hasSize(3)); + assertThat(records, hasItem(matches(profile, "KEYWORDS", null, "dc.subject::Math", "Math", INSERT))); + assertThat(records, hasItem(matches(profile, "EXTERNAL_IDS", null, "person.identifier.rid::ID", "ID", INSERT))); + assertThat(records, hasItem(matches(profile, "RESEARCHER_URLS", null, + "dc.identifier.uri::http://localhost:4000/handle/fake-handle/200", + "http://localhost:4000/handle/fake-handle/200", INSERT))); + + removeMetadata(profile, 
"dspace", "orcid", "sync-profile"); + + context.commit(); + + assertThat(orcidQueueService.findAll(context), empty()); + + } + + @Test + public void testWithManyInsertionAndDeletionOfSameMetadataValue() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item profile = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test User") + .withOrcidIdentifier("0000-1111-2222-3333") + .withOrcidAccessToken("ab4d18a0-8d9a-40f1-b601-a417255c8d20", eperson) + .withOrcidSynchronizationProfilePreference(BIOGRAPHICAL) + .withSubject("Science") + .build(); + + context.restoreAuthSystemState(); + context.commit(); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", null, + "dc.subject::Science", "Science", INSERT)); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType(KEYWORDS.name()) + .withDescription("Science") + .withMetadata("dc.subject::Science") + .withOperation(OrcidOperation.INSERT) + .withPutCode("12345") + .withStatus(201) + .build(); + + removeMetadata(profile, "dc", "subject", null); + + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", "12345", + "dc.subject::Science", "Science", DELETE)); + + OrcidHistoryBuilder.createOrcidHistory(context, profile, profile) + .withRecordType(KEYWORDS.name()) + .withDescription("Science") + .withMetadata("dc.subject::Science") + .withOperation(OrcidOperation.DELETE) + .withStatus(204) + .build(); + + addMetadata(profile, "dc", "subject", null, "Science", null); + + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", null, + "dc.subject::Science", "Science", INSERT)); + + OrcidHistoryBuilder.createOrcidHistory(context, 
profile, profile) + .withRecordType(KEYWORDS.name()) + .withDescription("Science") + .withMetadata("dc.subject::Science") + .withOperation(OrcidOperation.INSERT) + .withPutCode("12346") + .withStatus(201) + .build(); + + removeMetadata(profile, "dc", "subject", null); + + context.commit(); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords.get(0), matches(profile, "KEYWORDS", "12346", + "dc.subject::Science", "Science", DELETE)); + + } + + private void addMetadata(Item item, String schema, String element, String qualifier, String value, + String authority) throws Exception { + context.turnOffAuthorisationSystem(); + item = context.reloadEntity(item); + itemService.addMetadata(context, item, schema, element, qualifier, null, value, authority, 600); + itemService.update(context, item); + context.restoreAuthSystemState(); + } + + private void removeMetadata(Item item, String schema, String element, String qualifier) throws Exception { + context.turnOffAuthorisationSystem(); + item = context.reloadEntity(item); + List metadata = itemService.getMetadata(item, schema, element, qualifier, Item.ANY); + itemService.removeMetadataValues(context, item, metadata); + itemService.update(context, item); + context.restoreAuthSystemState(); + } + + private Collection createCollection(String name, String entityType) { + return CollectionBuilder.createCollection(context, parentCommunity) + .withName(name) + .withEntityType(entityType) + .build(); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/model/validator/OrcidValidatorTest.java b/dspace-api/src/test/java/org/dspace/orcid/model/validator/OrcidValidatorTest.java new file mode 100644 index 000000000000..20cad9ce2c92 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/model/validator/OrcidValidatorTest.java @@ -0,0 +1,662 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE 
files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.model.validator; + +import static org.dspace.orcid.model.validator.OrcidValidationError.AMOUNT_CURRENCY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_INVALID; +import static org.dspace.orcid.model.validator.OrcidValidationError.DISAMBIGUATION_SOURCE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.EXTERNAL_ID_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.FUNDER_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_ADDRESS_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_CITY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_COUNTRY_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.ORGANIZATION_NAME_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.PUBLICATION_DATE_INVALID; +import static org.dspace.orcid.model.validator.OrcidValidationError.TITLE_REQUIRED; +import static org.dspace.orcid.model.validator.OrcidValidationError.TYPE_REQUIRED; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Mockito.when; + +import java.util.List; + +import org.dspace.orcid.model.validator.impl.OrcidValidatorImpl; +import org.dspace.services.ConfigurationService; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import 
org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.orcid.jaxb.model.common.Iso3166Country; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Amount; +import org.orcid.jaxb.model.v3.release.common.DisambiguatedOrganization; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.OrganizationAddress; +import org.orcid.jaxb.model.v3.release.common.PublicationDate; +import org.orcid.jaxb.model.v3.release.common.Title; +import org.orcid.jaxb.model.v3.release.common.Year; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.ExternalIDs; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingTitle; +import org.orcid.jaxb.model.v3.release.record.Work; +import org.orcid.jaxb.model.v3.release.record.WorkTitle; + +/** + * Unit tests for {@link OrcidValidatorImpl} + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@RunWith(MockitoJUnitRunner.class) +public class OrcidValidatorTest { + + @Mock(lenient = true) + private ConfigurationService configurationService; + + @InjectMocks + private OrcidValidatorImpl validator; + + @Before + public void before() { + when(configurationService.getBooleanProperty("orcid.validation.work.enabled", true)).thenReturn(true); + when(configurationService.getBooleanProperty("orcid.validation.funding.enabled", true)).thenReturn(true); + when(configurationService.getArrayProperty("orcid.validation.organization.identifier-sources")) + .thenReturn(new String[] { "RINGGOLD", "GRID", "FUNDREF", "LEI" }); + } + + @Test + public void testWorkWithoutTitleAndTypeAndExternalIds() { + + List errors = validator.validateWork(new Work()); + assertThat(errors, hasSize(3)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED, 
TYPE_REQUIRED, EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testWorkWithoutWorkTitle() { + + Work work = new Work(); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithoutTitle() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithNullTitle() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title(null)); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithEmptyTitle() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("")); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWorkWithoutType() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + 
work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TYPE_REQUIRED)); + } + + @Test + public void testWorkWithoutExternalIds() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkType(WorkType.DATA_SET); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testWorkWithEmptyExternalIds() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkType(WorkType.DATA_SET); + work.setWorkExternalIdentifiers(new ExternalIDs()); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testWorkWithPublicationDateWithoutYear() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(PUBLICATION_DATE_INVALID)); + } + + @Test + public void testWorkWithPublicationDateWithInvalidYear() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + 
work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + Year year = new Year(); + year.setValue("INVALID"); + publicationDate.setYear(year); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(PUBLICATION_DATE_INVALID)); + } + + @Test + public void testWorkWithPublicationDateWithYearPriorTo1900() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + publicationDate.setYear(new Year(1850)); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(PUBLICATION_DATE_INVALID)); + } + + @Test + public void testValidWork() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.setWorkType(WorkType.DATA_SET); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + PublicationDate publicationDate = new PublicationDate(); + publicationDate.setYear(new Year(1956)); + work.setPublicationDate(publicationDate); + + List errors = validator.validateWork(work); + assertThat(errors, empty()); + } + + @Test + public void testFundingWithoutTitleAndExternalIdsAndOrganization() { + + List errors = validator.validateFunding(new Funding()); + assertThat(errors, hasSize(3)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED, FUNDER_REQUIRED, 
TITLE_REQUIRED)); + } + + @Test + public void testFundingWithoutExternalIdsAndOrganization() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Funding title")); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(2)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED, FUNDER_REQUIRED)); + } + + @Test + public void testFundingWithoutTitleAndOrganization() { + + Funding funding = new Funding(); + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(2)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED, FUNDER_REQUIRED)); + } + + @Test + public void testFundingWithoutTitleAndExternalIds() { + + Funding funding = new Funding(); + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(2)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED, EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testFundingWithoutTitle() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testFundingWithNullTitle() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title(null)); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + 
+ List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testFundingWithEmptyTitle() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testFundingWithEmptyExternalIds() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(EXTERNAL_ID_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutName() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setName(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_NAME_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithEmptyName() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new 
ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setName(""); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_NAME_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutAddress() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setAddress(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_ADDRESS_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutCity() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getAddress().setCity(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_CITY_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutCountry() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + 
funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getAddress().setCountry(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(ORGANIZATION_COUNTRY_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutDisambiguatedOrganization() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.setDisambiguatedOrganization(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATED_ORGANIZATION_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutDisambiguatedOrganizationId() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getDisambiguatedOrganization().setDisambiguatedOrganizationIdentifier(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATED_ORGANIZATION_VALUE_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithoutDisambiguatedOrganizationSource() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + 
funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getDisambiguatedOrganization().setDisambiguationSource(null); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATION_SOURCE_REQUIRED)); + } + + @Test + public void testFundingWithOrganizationWithInvalidDisambiguationSource() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + Organization organization = buildValidOrganization(); + organization.getDisambiguatedOrganization().setDisambiguationSource("INVALID"); + funding.setOrganization(organization); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(DISAMBIGUATION_SOURCE_INVALID)); + } + + @Test + public void testFundingWithoutAmountCurrency() { + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + funding.setAmount(new Amount()); + funding.getAmount().setContent("20000"); + + List errors = validator.validateFunding(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(AMOUNT_CURRENCY_REQUIRED)); + } + + @Test + public void testValidFunding() { + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + 
funding.getTitle().setTitle(new Title("Title")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validateFunding(funding); + assertThat(errors, empty()); + } + + @Test + public void testWithWorkValidationEnabled() { + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + work.setWorkExternalIdentifiers(new ExternalIDs()); + work.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + List errors = validator.validate(work); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TYPE_REQUIRED)); + } + + @Test + public void testWithWorkValidationDisabled() { + + when(configurationService.getBooleanProperty("orcid.validation.work.enabled", true)).thenReturn(false); + + Work work = new Work(); + work.setWorkTitle(new WorkTitle()); + work.getWorkTitle().setTitle(new Title("Work title")); + + List errors = validator.validate(work); + assertThat(errors, empty()); + } + + @Test + public void testWithFundingValidationEnabled() { + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("")); + + funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validate(funding); + assertThat(errors, hasSize(1)); + assertThat(errors, containsInAnyOrder(TITLE_REQUIRED)); + } + + @Test + public void testWithFundingValidationDisabled() { + + when(configurationService.getBooleanProperty("orcid.validation.funding.enabled", true)).thenReturn(false); + + Funding funding = new Funding(); + funding.setTitle(new FundingTitle()); + funding.getTitle().setTitle(new Title("")); + + 
funding.setExternalIdentifiers(new ExternalIDs()); + funding.getExternalIdentifiers().getExternalIdentifier().add(buildValidExternalID()); + + funding.setOrganization(buildValidOrganization()); + + List errors = validator.validate(funding); + assertThat(errors, empty()); + } + + private ExternalID buildValidExternalID() { + ExternalID externalID = new ExternalID(); + externalID.setRelationship(Relationship.SELF); + externalID.setType("TYPE"); + externalID.setValue("VALUE"); + return externalID; + } + + private Organization buildValidOrganization() { + Organization organization = new Organization(); + organization.setName("Organization"); + + OrganizationAddress address = new OrganizationAddress(); + address.setCity("City"); + address.setCountry(Iso3166Country.BA); + organization.setAddress(address); + + DisambiguatedOrganization disambiguatedOrganization = new DisambiguatedOrganization(); + disambiguatedOrganization.setDisambiguatedOrganizationIdentifier("ID"); + disambiguatedOrganization.setDisambiguationSource("LEI"); + organization.setDisambiguatedOrganization(disambiguatedOrganization); + + return organization; + } + +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java new file mode 100644 index 000000000000..e6ca2a3d9e7e --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/script/OrcidBulkPushIT.java @@ -0,0 +1,556 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.script; + +import static org.dspace.app.launcher.ScriptLauncher.handleScript; +import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.dspace.app.matcher.OrcidQueueMatcher.matches; +import static org.dspace.builder.OrcidQueueBuilder.createOrcidQueue; +import static 
org.dspace.orcid.OrcidOperation.DELETE; +import static org.dspace.orcid.OrcidOperation.INSERT; +import static org.dspace.orcid.OrcidOperation.UPDATE; +import static org.dspace.profile.OrcidSynchronizationMode.BATCH; +import static org.dspace.profile.OrcidSynchronizationMode.MANUAL; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import java.sql.SQLException; +import java.util.List; +import java.util.function.Predicate; + +import org.apache.commons.lang3.ArrayUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.OrcidTokenBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.eperson.EPerson; +import org.dspace.orcid.OrcidHistory; +import org.dspace.orcid.OrcidOperation; +import org.dspace.orcid.OrcidQueue; +import org.dspace.orcid.client.OrcidClient; +import org.dspace.orcid.client.OrcidResponse; +import org.dspace.orcid.exception.OrcidClientException; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.service.OrcidQueueService; +import org.dspace.orcid.service.impl.OrcidHistoryServiceImpl; +import 
org.dspace.profile.OrcidSynchronizationMode; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link OrcidBulkPush}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidBulkPushIT extends AbstractIntegrationTestWithDatabase { + + private Collection profileCollection; + + private Collection publicationCollection; + + private OrcidHistoryServiceImpl orcidHistoryService; + + private OrcidQueueService orcidQueueService; + + private ConfigurationService configurationService; + + private OrcidClient orcidClient; + + private OrcidClient orcidClientMock; + + @Before + public void setup() { + + orcidHistoryService = (OrcidHistoryServiceImpl) OrcidServiceFactory.getInstance().getOrcidHistoryService(); + orcidQueueService = OrcidServiceFactory.getInstance().getOrcidQueueService(); + + configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + context.setCurrentUser(admin); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent community") + .build(); + + profileCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Profiles") + .withEntityType("Person") + .build(); + + publicationCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Publications") + .withEntityType("Publication") + .build(); + + orcidClientMock = mock(OrcidClient.class); + + orcidClient = orcidHistoryService.getOrcidClient(); + orcidHistoryService.setOrcidClient(orcidClientMock); + + } + + @After + public void after() throws SQLException { + List records = orcidHistoryService.findAll(context); + for (OrcidHistory record : records) { + orcidHistoryService.delete(context, record); + } + orcidHistoryService.setOrcidClient(orcidClient); + } + + @Test + public void 
testWithoutOrcidQueueRecords() throws Exception { + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasSize(1)); + assertThat(handler.getInfoMessages().get(0), is("Found 0 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + } + + @Test + public void testWithManyOrcidQueueRecords() throws Exception { + + context.turnOffAuthorisationSystem(); + + EPerson owner = EPersonBuilder.createEPerson(context) + .withEmail("owner@test.it") + .build(); + context.restoreAuthSystemState(); + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("1111-2222-3333-4444", admin, MANUAL); + Item thirdProfileItem = createProfileItemItem("2222-3333-4444-5555", owner, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication("Second publication"); + Item thirdEntity = createPublication("Third publication"); + Item fourthEntity = createPublication("Fourth publication"); + Item fifthEntity = createPublication("Fifth publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + when(orcidClientMock.update(any(), eq("0000-1111-2222-3333"), any(), eq("98765"))) + .thenReturn(updatedResponse("98765")); + + when(orcidClientMock.deleteByPutCode(any(), eq("0000-1111-2222-3333"), eq("22222"), eq("/work"))) + .thenReturn(deletedResponse()); + + when(orcidClientMock.push(any(), eq("2222-3333-4444-5555"), any())) + .thenReturn(createdResponse("11111")); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, secondEntity, "98765"); + createOrcidQueue(context, firstProfileItem, "Description", "Publication", "22222"); + createOrcidQueue(context, secondProfileItem, thirdEntity); + 
createOrcidQueue(context, secondProfileItem, fourthEntity); + createOrcidQueue(context, thirdProfileItem, fifthEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + String firstProfileItemId = firstProfileItem.getID().toString(); + String thirdProfileItemId = thirdProfileItem.getID().toString(); + + assertThat(handler.getInfoMessages(), hasSize(9)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 4 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItemId, + "History record created with status 201. The operation was completed successfully", + "Update of Publication for profile with ID: " + firstProfileItemId + " by put code 98765", + "History record created with status 200. The operation was completed successfully", + "Deletion of Publication for profile with ID: " + firstProfileItemId + " by put code 22222", + "History record created with status 204. The operation was completed successfully", + "Addition of Publication for profile with ID: " + thirdProfileItemId, + "History record created with status 201. 
The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).push(any(), eq("2222-3333-4444-5555"), any()); + verify(orcidClientMock).update(any(), eq("0000-1111-2222-3333"), any(), eq("98765")); + verify(orcidClientMock).deleteByPutCode(any(), eq("0000-1111-2222-3333"), eq("22222"), eq("/work")); + + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(2)); + assertThat(queueRecords, hasItem(matches(secondProfileItem, thirdEntity, "Publication", INSERT, 0))); + assertThat(queueRecords, hasItem(matches(secondProfileItem, fourthEntity, "Publication", INSERT, 0))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(4)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, secondEntity, 200, UPDATE)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, 204, DELETE)))); + assertThat(historyRecords, hasItem(matches(history(thirdProfileItem, fifthEntity, 201, INSERT)))); + + } + + @Test + public void testWithVeryLongTitleQueueRecords() throws Exception { + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item firstEntity = createPublication("Publication with a very very very very very very very very very " + + "very very very very very very very very very very very very very very very very very very very very " + + "very very very very very very very very very very very very very very very very very even " + + "extremely long title"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + 
when(orcidClientMock.update(any(), eq("0000-1111-2222-3333"), any(), eq("98765"))) + .thenReturn(updatedResponse("98765")); + + when(orcidClientMock.deleteByPutCode( + any(), + eq("0000-1111-2222-3333"), + eq("22222"), + eq("/work")) + ).thenReturn(deletedResponse()); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, "Description", "Publication", "22222"); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + String firstProfileItemId = firstProfileItem.getID().toString(); + + assertThat(handler.getInfoMessages(), hasSize(5)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItemId, + "History record created with status 201. The operation was completed successfully", + "Deletion of Publication for profile with ID: " + firstProfileItemId + " by put code 22222", + "History record created with status 204. 
The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).deleteByPutCode( + any(), + eq("0000-1111-2222-3333"), + eq("22222"), + eq("/work")); + + verifyNoMoreInteractions(orcidClientMock); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, 204, DELETE)))); + } + + @Test + public void testWithOneValidationError() throws Exception { + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("1111-2222-3333-4444", admin, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication(""); + Item thirdEntity = createPublication("Third publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + when(orcidClientMock.push(any(), eq("1111-2222-3333-4444"), any())) + .thenReturn(createdResponse("55555")); + + createOrcidQueue(context, firstProfileItem, firstEntity); + createOrcidQueue(context, firstProfileItem, secondEntity, "98765"); + createOrcidQueue(context, secondProfileItem, thirdEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + assertThat(handler.getInfoMessages(), hasSize(6)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 3 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItem.getID().toString(), + "History record created with status 201. 
The operation was completed successfully", + "Update of Publication for profile with ID: " + firstProfileItem.getID().toString() + " by put code 98765", + "Addition of Publication for profile with ID: " + secondProfileItem.getID().toString(), + "History record created with status 201. The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), hasSize(1)); + assertThat(handler.getErrorMessages(), containsInAnyOrder( + "Errors occurs during ORCID object validation. Error codes: title.required")); + + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).push(any(), eq("1111-2222-3333-4444"), any()); + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(firstProfileItem, secondEntity, "Publication", UPDATE, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(secondProfileItem, thirdEntity, 201, INSERT)))); + + } + + @Test + public void testWithUnexpectedErrorForMissingOrcid() throws Exception { + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("", admin, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication("Second publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenReturn(createdResponse("12345")); + + createOrcidQueue(context, secondProfileItem, secondEntity); + createOrcidQueue(context, firstProfileItem, firstEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + 
assertThat(handler.getInfoMessages(), hasSize(4)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + secondProfileItem.getID().toString(), + "Addition of Publication for profile with ID: " + firstProfileItem.getID().toString(), + "History record created with status 201. The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), hasSize(1)); + assertThat(handler.getErrorMessages(), contains("An unexpected error occurs during the synchronization: " + + "The related profileItem item (id = " + secondProfileItem.getID() + ") does not have an orcid")); + + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(secondProfileItem, secondEntity, "Publication", INSERT, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(1)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 201, INSERT)))); + + } + + @Test + public void testWithOrcidClientException() throws Exception { + + Item firstProfileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item secondProfileItem = createProfileItemItem("1111-2222-3333-4444", admin, BATCH); + + Item firstEntity = createPublication("First publication"); + Item secondEntity = createPublication("Second publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenThrow(new OrcidClientException(400, "Bad request")); + + when(orcidClientMock.push(any(), eq("1111-2222-3333-4444"), any())) + .thenReturn(createdResponse("55555")); + + createOrcidQueue(context, firstProfileItem, firstEntity); + 
createOrcidQueue(context, secondProfileItem, secondEntity); + + context.commit(); + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + + assertThat(handler.getInfoMessages(), hasSize(5)); + assertThat(handler.getInfoMessages(), containsInAnyOrder( + "Found 2 queue records to synchronize with ORCID", + "Addition of Publication for profile with ID: " + firstProfileItem.getID().toString(), + "History record created with status 400. The resource sent to ORCID registry is not valid", + "Addition of Publication for profile with ID: " + secondProfileItem.getID().toString(), + "History record created with status 201. The operation was completed successfully")); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + verify(orcidClientMock).push(any(), eq("0000-1111-2222-3333"), any()); + verify(orcidClientMock).push(any(), eq("1111-2222-3333-4444"), any()); + verifyNoMoreInteractions(orcidClientMock); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(firstProfileItem, firstEntity, "Publication", INSERT, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, hasItem(matches(history(firstProfileItem, firstEntity, 400, INSERT)))); + assertThat(historyRecords, hasItem(matches(history(secondProfileItem, secondEntity, 201, INSERT)))); + + } + + @Test + @SuppressWarnings("unchecked") + public void testWithTooManyAttempts() throws Exception { + + configurationService.setProperty("orcid.bulk-synchronization.max-attempts", 2); + + Item profileItem = createProfileItemItem("0000-1111-2222-3333", eperson, BATCH); + Item entity = createPublication("First publication"); + + when(orcidClientMock.push(any(), eq("0000-1111-2222-3333"), any())) + .thenThrow(new OrcidClientException(400, "Bad request")); + + createOrcidQueue(context, 
profileItem, entity); + + // First attempt + + TestDSpaceRunnableHandler handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasItem("Found 1 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + List queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 1))); + + List historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(1)); + assertThat(historyRecords, hasItem(matches(history(profileItem, entity, 400, INSERT)))); + + // Second attempt + + handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasItem("Found 1 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 2))); + + historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + assertThat(historyRecords, contains(matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)))); + + // Third attempt + + handler = runBulkSynchronization(false); + assertThat(handler.getInfoMessages(), hasItem("Found 0 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 2))); + + historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(2)); + 
assertThat(historyRecords, contains(matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)))); + + // Fourth attempt forcing synchronization + + handler = runBulkSynchronization(true); + assertThat(handler.getInfoMessages(), hasItem("Found 1 queue records to synchronize with ORCID")); + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + + queueRecords = orcidQueueService.findAll(context); + assertThat(queueRecords, hasSize(1)); + assertThat(queueRecords, hasItem(matches(profileItem, entity, "Publication", INSERT, 3))); + + historyRecords = orcidHistoryService.findAll(context); + assertThat(historyRecords, hasSize(3)); + assertThat(historyRecords, contains(matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)), + matches(history(profileItem, entity, 400, INSERT)))); + } + + private Predicate history(Item profileItem, Item entity, int status, OrcidOperation operation) { + return history -> profileItem.equals(history.getProfileItem()) + && entity.equals(history.getEntity()) + && history.getStatus().equals(status) + && operation == history.getOperation(); + } + + private Predicate history(Item profileItem, int status, OrcidOperation operation) { + return history -> profileItem.equals(history.getProfileItem()) + && history.getStatus().equals(status) + && operation == history.getOperation(); + } + + private TestDSpaceRunnableHandler runBulkSynchronization(boolean forceSynchronization) throws Exception { + String[] args = new String[] { "orcid-bulk-push" }; + args = forceSynchronization ? 
ArrayUtils.add(args, "-f") : args; + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + return handler; + } + + private Item createProfileItemItem(String orcid, EPerson owner, OrcidSynchronizationMode mode) + throws Exception { + + Item item = ItemBuilder.createItem(context, profileCollection) + .withTitle("Test user") + .withOrcidIdentifier(orcid) + .withOrcidSynchronizationMode(mode) + .withDspaceObjectOwner(owner.getFullName(), owner.getID().toString()) + .build(); + + OrcidTokenBuilder.create(context, owner, "9c913f57-961e-48af-9223-cfad6562c925") + .withProfileItem(item) + .build(); + + return item; + } + + private Item createPublication(String title) { + return ItemBuilder.createItem(context, publicationCollection) + .withTitle(title) + .withType("Controlled Vocabulary for Resource Type Genres::dataset") + .build(); + } + + private OrcidResponse createdResponse(String putCode) { + return new OrcidResponse(201, putCode, null); + } + + private OrcidResponse updatedResponse(String putCode) { + return new OrcidResponse(200, putCode, null); + } + + private OrcidResponse deletedResponse() { + return new OrcidResponse(204, null, null); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidEntityFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidEntityFactoryServiceIT.java new file mode 100644 index 000000000000..17bc6ee531c3 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidEntityFactoryServiceIT.java @@ -0,0 +1,296 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import static org.apache.commons.lang.StringUtils.endsWith; +import static org.dspace.app.matcher.LambdaMatcher.has; 
+import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.dspace.builder.RelationshipTypeBuilder.createRelationshipTypeBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.orcid.jaxb.model.common.ContributorRole.AUTHOR; +import static org.orcid.jaxb.model.common.ContributorRole.EDITOR; +import static org.orcid.jaxb.model.common.FundingContributorRole.LEAD; +import static org.orcid.jaxb.model.common.SequenceType.ADDITIONAL; +import static org.orcid.jaxb.model.common.SequenceType.FIRST; + +import java.util.List; +import java.util.function.Predicate; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EntityTypeBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.RelationshipBuilder; +import org.dspace.content.Collection; +import org.dspace.content.EntityType; +import org.dspace.content.Item; +import org.dspace.content.RelationshipType; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.junit.Before; +import org.junit.Test; +import org.orcid.jaxb.model.common.ContributorRole; +import org.orcid.jaxb.model.common.FundingContributorRole; +import org.orcid.jaxb.model.common.Iso3166Country; +import org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.common.SequenceType; +import org.orcid.jaxb.model.common.WorkType; +import org.orcid.jaxb.model.v3.release.common.Contributor; +import org.orcid.jaxb.model.v3.release.common.FuzzyDate; +import org.orcid.jaxb.model.v3.release.common.Organization; +import org.orcid.jaxb.model.v3.release.common.Url; +import 
org.orcid.jaxb.model.v3.release.record.Activity; +import org.orcid.jaxb.model.v3.release.record.ExternalID; +import org.orcid.jaxb.model.v3.release.record.Funding; +import org.orcid.jaxb.model.v3.release.record.FundingContributor; +import org.orcid.jaxb.model.v3.release.record.FundingContributors; +import org.orcid.jaxb.model.v3.release.record.Work; + +/** + * Integration tests for {@link OrcidEntityFactoryService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidEntityFactoryServiceIT extends AbstractIntegrationTestWithDatabase { + + private OrcidEntityFactoryService entityFactoryService; + + private Collection orgUnits; + + private Collection publications; + + private Collection projects; + + @Before + public void setup() { + + entityFactoryService = OrcidServiceFactory.getInstance().getOrcidEntityFactoryService(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withTitle("Parent community") + .build(); + + orgUnits = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("OrgUnit") + .build(); + + publications = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Publication") + .build(); + + projects = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Project") + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testWorkCreation() { + + context.turnOffAuthorisationSystem(); + + Item publication = ItemBuilder.createItem(context, publications) + .withTitle("Test publication") + .withAuthor("Walter White") + .withAuthor("Jesse Pinkman") + .withEditor("Editor") + .withIssueDate("2021-04-30") + .withDescriptionAbstract("Publication description") + .withLanguage("en_US") + .withType("Book") + .withIsPartOf("Journal") + .withDoiIdentifier("doi-id") + 
.withScopusIdentifier("scopus-id") + .build(); + + context.restoreAuthSystemState(); + + Activity activity = entityFactoryService.createOrcidObject(context, publication); + assertThat(activity, instanceOf(Work.class)); + + Work work = (Work) activity; + assertThat(work.getJournalTitle(), notNullValue()); + assertThat(work.getJournalTitle().getContent(), is("Journal")); + assertThat(work.getLanguageCode(), is("en")); + assertThat(work.getPublicationDate(), matches(date("2021", "04", "30"))); + assertThat(work.getShortDescription(), is("Publication description")); + assertThat(work.getPutCode(), nullValue()); + assertThat(work.getWorkType(), is(WorkType.BOOK)); + assertThat(work.getWorkTitle(), notNullValue()); + assertThat(work.getWorkTitle().getTitle(), notNullValue()); + assertThat(work.getWorkTitle().getTitle().getContent(), is("Test publication")); + assertThat(work.getWorkContributors(), notNullValue()); + assertThat(work.getUrl(), matches(urlEndsWith(publication.getHandle()))); + + List contributors = work.getWorkContributors().getContributor(); + assertThat(contributors, hasSize(3)); + assertThat(contributors, has(contributor("Walter White", AUTHOR, FIRST))); + assertThat(contributors, has(contributor("Editor", EDITOR, FIRST))); + assertThat(contributors, has(contributor("Jesse Pinkman", AUTHOR, ADDITIONAL))); + + assertThat(work.getExternalIdentifiers(), notNullValue()); + + List externalIds = work.getExternalIdentifiers().getExternalIdentifier(); + assertThat(externalIds, hasSize(3)); + assertThat(externalIds, has(selfExternalId("doi", "doi-id"))); + assertThat(externalIds, has(selfExternalId("eid", "scopus-id"))); + assertThat(externalIds, has(selfExternalId("handle", publication.getHandle()))); + + } + + @Test + public void testEmptyWorkWithUnknownTypeCreation() { + + context.turnOffAuthorisationSystem(); + + Item publication = ItemBuilder.createItem(context, publications) + .withType("TYPE") + .build(); + + context.restoreAuthSystemState(); + + Activity 
activity = entityFactoryService.createOrcidObject(context, publication); + assertThat(activity, instanceOf(Work.class)); + + Work work = (Work) activity; + assertThat(work.getJournalTitle(), nullValue()); + assertThat(work.getLanguageCode(), nullValue()); + assertThat(work.getPublicationDate(), nullValue()); + assertThat(work.getShortDescription(), nullValue()); + assertThat(work.getPutCode(), nullValue()); + assertThat(work.getWorkType(), is(WorkType.OTHER)); + assertThat(work.getWorkTitle(), nullValue()); + assertThat(work.getWorkContributors(), notNullValue()); + assertThat(work.getWorkContributors().getContributor(), empty()); + assertThat(work.getExternalIdentifiers(), notNullValue()); + + List externalIds = work.getExternalIdentifiers().getExternalIdentifier(); + assertThat(externalIds, hasSize(1)); + assertThat(externalIds, has(selfExternalId("handle", publication.getHandle()))); + } + + @Test + public void testFundingCreation() { + context.turnOffAuthorisationSystem(); + + Item orgUnit = ItemBuilder.createItem(context, orgUnits) + .withOrgUnitLegalName("4Science") + .withOrgUnitCountry("IT") + .withOrgUnitLocality("Milan") + .withOrgUnitCrossrefIdentifier("12345") + .build(); + + Item projectItem = ItemBuilder.createItem(context, projects) + .withTitle("Test funding") + .withProjectStartDate("2001-03") + .withProjectEndDate("2010-03-25") + .withProjectInvestigator("Walter White") + .withProjectInvestigator("Jesse Pinkman") + .withProjectAmount("123") + .withProjectAmountCurrency("EUR") + .withOtherIdentifier("888-666-444") + .withIdentifier("000-111-333") + .withDescription("This is a funding to test orcid mapping") + .build(); + + EntityType projectType = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build(); + EntityType orgUnitType = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build(); + + RelationshipType isAuthorOfPublication = createRelationshipTypeBuilder(context, orgUnitType, projectType, + "isOrgUnitOfProject", 
"isProjectOfOrgUnit", 0, null, 0, null).build(); + + RelationshipBuilder.createRelationshipBuilder(context, orgUnit, projectItem, isAuthorOfPublication).build(); + + context.restoreAuthSystemState(); + + Activity activity = entityFactoryService.createOrcidObject(context, projectItem); + assertThat(activity, instanceOf(Funding.class)); + + Funding funding = (Funding) activity; + assertThat(funding.getTitle(), notNullValue()); + assertThat(funding.getTitle().getTitle(), notNullValue()); + assertThat(funding.getTitle().getTitle().getContent(), is("Test funding")); + assertThat(funding.getStartDate(), matches(date("2001", "03", "01"))); + assertThat(funding.getEndDate(), matches(date("2010", "03", "25"))); + assertThat(funding.getDescription(), is("This is a funding to test orcid mapping")); + assertThat(funding.getUrl(), matches(urlEndsWith(projectItem.getHandle()))); + assertThat(funding.getAmount(), notNullValue()); + assertThat(funding.getAmount().getContent(), is("123")); + assertThat(funding.getAmount().getCurrencyCode(), is("EUR")); + + Organization organization = funding.getOrganization(); + assertThat(organization, notNullValue()); + assertThat(organization.getName(), is("4Science")); + assertThat(organization.getAddress(), notNullValue()); + assertThat(organization.getAddress().getCountry(), is(Iso3166Country.IT)); + assertThat(organization.getAddress().getCity(), is("Milan")); + assertThat(organization.getDisambiguatedOrganization(), notNullValue()); + assertThat(organization.getDisambiguatedOrganization().getDisambiguatedOrganizationIdentifier(), is("12345")); + assertThat(organization.getDisambiguatedOrganization().getDisambiguationSource(), is("FUNDREF")); + + FundingContributors fundingContributors = funding.getContributors(); + assertThat(fundingContributors, notNullValue()); + + List contributors = fundingContributors.getContributor(); + assertThat(contributors, hasSize(2)); + assertThat(contributors, has(fundingContributor("Walter White", LEAD))); + 
assertThat(contributors, has(fundingContributor("Jesse Pinkman", LEAD))); + + assertThat(funding.getExternalIdentifiers(), notNullValue()); + + List externalIds = funding.getExternalIdentifiers().getExternalIdentifier(); + assertThat(externalIds, hasSize(2)); + assertThat(externalIds, has(selfExternalId("other-id", "888-666-444"))); + assertThat(externalIds, has(selfExternalId("grant_number", "000-111-333"))); + } + + private Predicate selfExternalId(String type, String value) { + return externalId(type, value, Relationship.SELF); + } + + private Predicate externalId(String type, String value, Relationship relationship) { + return externalId -> externalId.getRelationship() == relationship + && type.equals(externalId.getType()) + && value.equals(externalId.getValue()); + } + + private Predicate contributor(String name, ContributorRole role, SequenceType sequence) { + return contributor -> contributor.getCreditName().getContent().equals(name) + && role.equals(contributor.getContributorAttributes().getContributorRole()) + && contributor.getContributorAttributes().getContributorSequence() == sequence; + } + + private Predicate fundingContributor(String name, FundingContributorRole role) { + return contributor -> contributor.getCreditName().getContent().equals(name) + && role.equals(contributor.getContributorAttributes().getContributorRole()); + } + + private Predicate date(String year, String month, String days) { + return date -> date != null + && year.equals(date.getYear().getValue()) + && month.equals(date.getMonth().getValue()) + && days.equals(date.getDay().getValue()); + } + + private Predicate urlEndsWith(String handle) { + return url -> url != null && url.getValue() != null && endsWith(url.getValue(), handle); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java new file mode 100644 index 
000000000000..894029f54e14 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/service/OrcidProfileSectionFactoryServiceIT.java @@ -0,0 +1,244 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import static org.dspace.app.matcher.LambdaMatcher.matches; +import static org.dspace.orcid.model.OrcidProfileSectionType.COUNTRY; +import static org.dspace.orcid.model.OrcidProfileSectionType.EXTERNAL_IDS; +import static org.dspace.orcid.model.OrcidProfileSectionType.KEYWORDS; +import static org.dspace.orcid.model.OrcidProfileSectionType.OTHER_NAMES; +import static org.dspace.orcid.model.OrcidProfileSectionType.RESEARCHER_URLS; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.factory.OrcidServiceFactory; +import org.dspace.orcid.model.OrcidProfileSectionType; +import org.dspace.orcid.model.factory.OrcidProfileSectionFactory; +import org.junit.Before; +import org.junit.Test; +import org.orcid.jaxb.model.common.Iso3166Country; +import 
org.orcid.jaxb.model.common.Relationship; +import org.orcid.jaxb.model.v3.release.record.Address; +import org.orcid.jaxb.model.v3.release.record.Keyword; +import org.orcid.jaxb.model.v3.release.record.OtherName; +import org.orcid.jaxb.model.v3.release.record.PersonExternalIdentifier; +import org.orcid.jaxb.model.v3.release.record.ResearcherUrl; + +/** + * Integration tests for {@link OrcidProfileSectionFactoryService}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OrcidProfileSectionFactoryServiceIT extends AbstractIntegrationTestWithDatabase { + + private OrcidProfileSectionFactoryService profileSectionFactoryService; + + private ItemService itemService; + + private Collection collection; + + @Before + public void setup() { + + profileSectionFactoryService = OrcidServiceFactory.getInstance().getOrcidProfileSectionFactoryService(); + itemService = ContentServiceFactory.getInstance().getItemService(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withTitle("Parent community") + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Person") + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testAddressCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonCountry("IT") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "person.country", 0)); + + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, COUNTRY); + assertThat(orcidObject, instanceOf(Address.class)); + Address address = (Address) orcidObject; + assertThat(address.getCountry(), notNullValue()); + assertThat(address.getCountry().getValue(), is(Iso3166Country.IT)); + + } + + @Test + public void 
testAddressMetadataSignatureGeneration() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withPersonCountry("IT") + .build(); + context.restoreAuthSystemState(); + + OrcidProfileSectionFactory countryFactory = getFactory(item, COUNTRY); + + List signatures = countryFactory.getMetadataSignatures(context, item); + assertThat(signatures, hasSize(1)); + assertThat(countryFactory.getDescription(context, item, signatures.get(0)), is("IT")); + } + + @Test + public void testExternalIdentifiersCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withScopusAuthorIdentifier("SCOPUS-123456") + .withResearcherIdentifier("R-ID-01") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "person.identifier.scopus-author-id", 0)); + + Object firstOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, EXTERNAL_IDS); + assertThat(firstOrcidObject, instanceOf(PersonExternalIdentifier.class)); + assertThat((PersonExternalIdentifier) firstOrcidObject, matches(hasTypeAndValue("SCOPUS", "SCOPUS-123456"))); + + values = List.of(getMetadata(item, "person.identifier.rid", 0)); + + Object secondOrcidObject = profileSectionFactoryService.createOrcidObject(context, values, EXTERNAL_IDS); + assertThat(secondOrcidObject, instanceOf(PersonExternalIdentifier.class)); + assertThat((PersonExternalIdentifier) secondOrcidObject, matches(hasTypeAndValue("RID", "R-ID-01"))); + } + + @Test + public void testExternalIdentifiersGeneration() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withScopusAuthorIdentifier("SCOPUS-123456") + .withResearcherIdentifier("R-ID-01") + .build(); + context.restoreAuthSystemState(); + + OrcidProfileSectionFactory externalIdsFactory = getFactory(item, 
EXTERNAL_IDS); + List signatures = externalIdsFactory.getMetadataSignatures(context, item); + assertThat(signatures, hasSize(2)); + + List descriptions = signatures.stream() + .map(signature -> externalIdsFactory.getDescription(context, item, signature)) + .collect(Collectors.toList()); + + assertThat(descriptions, containsInAnyOrder("SCOPUS-123456", "R-ID-01")); + } + + @Test + public void testResearcherUrlsCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withUriIdentifier("www.test.com") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "dc.identifier.uri", 0)); + + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, RESEARCHER_URLS); + assertThat(orcidObject, instanceOf(ResearcherUrl.class)); + assertThat((ResearcherUrl) orcidObject, matches(hasUrl("www.test.com"))); + } + + @Test + public void testKeywordsCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withSubject("Subject") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "dc.subject", 0)); + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, KEYWORDS); + assertThat(orcidObject, instanceOf(Keyword.class)); + assertThat((Keyword) orcidObject, matches(hasContent("Subject"))); + } + + @Test + public void testOtherNamesCreation() { + context.turnOffAuthorisationSystem(); + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Test profile") + .withVariantName("Variant name") + .withVernacularName("Vernacular name") + .build(); + context.restoreAuthSystemState(); + + List values = List.of(getMetadata(item, "person.name.variant", 0)); + Object orcidObject = profileSectionFactoryService.createOrcidObject(context, values, OTHER_NAMES); + assertThat(orcidObject, 
instanceOf(OtherName.class)); + assertThat((OtherName) orcidObject, matches(hasValue("Variant name"))); + + values = List.of(getMetadata(item, "person.name.translated", 0)); + orcidObject = profileSectionFactoryService.createOrcidObject(context, values, OTHER_NAMES); + assertThat(orcidObject, instanceOf(OtherName.class)); + assertThat((OtherName) orcidObject, matches(hasValue("Vernacular name"))); + } + + private MetadataValue getMetadata(Item item, String metadataField, int place) { + List values = itemService.getMetadataByMetadataString(item, metadataField); + assertThat(values.size(), greaterThan(place)); + return values.get(place); + } + + private Predicate hasTypeAndValue(String type, String value) { + return identifier -> value.equals(identifier.getValue()) + && type.equals(identifier.getType()) + && identifier.getRelationship() == Relationship.SELF + && identifier.getUrl() != null && value.equals(identifier.getUrl().getValue()); + } + + private Predicate hasUrl(String url) { + return researcherUrl -> researcherUrl.getUrl() != null && url.equals(researcherUrl.getUrl().getValue()); + } + + private Predicate hasContent(String value) { + return keyword -> value.equals(keyword.getContent()); + } + + private Predicate hasValue(String value) { + return name -> value.equals(name.getContent()); + } + + private OrcidProfileSectionFactory getFactory(Item item, OrcidProfileSectionType sectionType) { + return profileSectionFactoryService.findBySectionType(sectionType) + .orElseThrow(() -> new IllegalStateException("No profile section factory of type " + sectionType)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java new file mode 100644 index 000000000000..66b9a98e72ca --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/orcid/service/PlainMetadataSignatureGeneratorIT.java @@ -0,0 +1,166 @@ +/** + * The contents of this 
file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.orcid.service; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.notNullValue; + +import java.util.List; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.orcid.service.impl.PlainMetadataSignatureGeneratorImpl; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link PlainMetadataSignatureGeneratorImpl}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class PlainMetadataSignatureGeneratorIT extends AbstractIntegrationTestWithDatabase { + + private ItemService itemService = ContentServiceFactory.getInstance().getItemService(); + + private Collection collection; + + private MetadataSignatureGenerator generator = new PlainMetadataSignatureGeneratorImpl(); + + @Before + public void setup() { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withTitle("Parent community") + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .withEntityType("Person") + .build(); + + context.restoreAuthSystemState(); + } + + @Test + public void testSignatureGenerationWithManyMetadataValues() { + + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item title") + .withIssueDate("2020-01-01") + .withAuthor("Jesse Pinkman") + .withEditor("Editor") + .build(); + + context.restoreAuthSystemState(); + + MetadataValue author = getMetadata(item, "dc.contributor.author", 0); + MetadataValue editor = getMetadata(item, "dc.contributor.editor", 0); + + String signature = generator.generate(context, List.of(author, editor)); + assertThat(signature, notNullValue()); + + String expectedSignature = "dc.contributor.author::Jesse Pinkman§§" + + "dc.contributor.editor::Editor"; + + assertThat(signature, equalTo(expectedSignature)); + + String anotherSignature = generator.generate(context, List.of(editor, author)); + assertThat(anotherSignature, equalTo(signature)); + + List metadataValues = generator.findBySignature(context, item, signature); + assertThat(metadataValues, hasSize(2)); + assertThat(metadataValues, containsInAnyOrder(author, editor)); + + } + + @Test + public void testSignatureGenerationWithSingleMetadataValue() { + + context.turnOffAuthorisationSystem(); + + Item item = 
ItemBuilder.createItem(context, collection) + .withTitle("Item title") + .withDescription("Description") + .withAuthor("Jesse Pinkman") + .withUriIdentifier("https://www.4science.it/en") + .build(); + + context.restoreAuthSystemState(); + + MetadataValue description = getMetadata(item, "dc.description", 0); + String signature = generator.generate(context, List.of(description)); + assertThat(signature, notNullValue()); + assertThat(signature, equalTo("dc.description::Description")); + + List metadataValues = generator.findBySignature(context, item, signature); + assertThat(metadataValues, hasSize(1)); + assertThat(metadataValues, containsInAnyOrder(description)); + + MetadataValue url = getMetadata(item, "dc.identifier.uri", 0); + signature = generator.generate(context, List.of(url)); + assertThat(signature, equalTo("dc.identifier.uri::https://www.4science.it/en")); + + metadataValues = generator.findBySignature(context, item, signature); + assertThat(metadataValues, hasSize(1)); + assertThat(metadataValues, containsInAnyOrder(url)); + + } + + @Test + public void testSignatureGenerationWithManyEqualsMetadataValues() { + context.turnOffAuthorisationSystem(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Item title") + .withDescription("Description") + .withAuthor("Jesse Pinkman") + .withAuthor("Jesse Pinkman") + .build(); + + context.restoreAuthSystemState(); + + MetadataValue firstAuthor = getMetadata(item, "dc.contributor.author", 0); + String firstSignature = generator.generate(context, List.of(firstAuthor)); + assertThat(firstSignature, notNullValue()); + assertThat(firstSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + + MetadataValue secondAuthor = getMetadata(item, "dc.contributor.author", 1); + String secondSignature = generator.generate(context, List.of(secondAuthor)); + assertThat(secondSignature, notNullValue()); + assertThat(secondSignature, equalTo("dc.contributor.author::Jesse Pinkman")); + + List metadataValues 
= generator.findBySignature(context, item, firstSignature); + assertThat(metadataValues, hasSize(1)); + assertThat(metadataValues, anyOf(contains(firstAuthor), contains(secondAuthor))); + } + + private MetadataValue getMetadata(Item item, String metadataField, int place) { + List values = itemService.getMetadataByMetadataString(item, metadataField); + assertThat(values.size(), greaterThan(place)); + return values.get(place); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/process/ProcessIT.java b/dspace-api/src/test/java/org/dspace/process/ProcessIT.java new file mode 100644 index 000000000000..d6640652121c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/process/ProcessIT.java @@ -0,0 +1,90 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.process; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.ProcessBuilder; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.dspace.scripts.Process; +import org.dspace.scripts.factory.ScriptServiceFactory; +import org.dspace.scripts.service.ProcessService; +import org.junit.Test; + +/** + * This class will aim to test Process related use cases + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class ProcessIT extends AbstractIntegrationTestWithDatabase { + + protected ProcessService processService = ScriptServiceFactory.getInstance().getProcessService(); + protected GroupService groupService = 
EPersonServiceFactory.getInstance().getGroupService(); + + @Test + public void checkProcessGroupsTest() throws Exception { + context.turnOffAuthorisationSystem(); + Group groupA = GroupBuilder.createGroup(context) + .withName("Group A") + .addMember(admin) + .build(); + + Set groupSet = new HashSet<>(); + groupSet.add(groupA); + + Process processA = ProcessBuilder.createProcess(context, admin, "mock-script", + new LinkedList<>(), + groupSet).build(); + + context.restoreAuthSystemState(); + Process process = processService.find(context, processA.getID()); + List groups = process.getGroups(); + boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupA.getID())); + assertTrue(isPresent); + } + + @Test + public void removeOneGroupTest() throws Exception { + + context.turnOffAuthorisationSystem(); + Group groupA = GroupBuilder.createGroup(context) + .withName("Group A") + .addMember(admin).build(); + + Set groupSet = new HashSet<>(); + groupSet.add(groupA); + + UUID groupUuid = groupA.getID(); + Process processA = ProcessBuilder.createProcess(context, admin, "mock-script", new LinkedList<>(), + groupSet).build(); + + context.restoreAuthSystemState(); + + groupService.delete(context, groupA); + context.commit(); + context.reloadEntity(groupA); + processA = context.reloadEntity(processA); + + Process process = processService.find(context, processA.getID()); + List groups = process.getGroups(); + boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupUuid)); + assertFalse(isPresent); + + } +} diff --git a/dspace-api/src/test/java/org/dspace/qaevent/MockQAEventService.java b/dspace-api/src/test/java/org/dspace/qaevent/MockQAEventService.java new file mode 100644 index 000000000000..3d460015f7e0 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/qaevent/MockQAEventService.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source 
+ * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent; + +import java.io.IOException; + +import org.apache.solr.client.solrj.SolrServerException; +import org.dspace.qaevent.service.impl.QAEventServiceImpl; +import org.dspace.solr.MockSolrServer; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.stereotype.Service; + +/** + * Mock SOLR service for the qaevents Core. + */ +@Service +public class MockQAEventService extends QAEventServiceImpl implements InitializingBean, DisposableBean { + private MockSolrServer mockSolrServer; + + @Override + public void afterPropertiesSet() throws Exception { + mockSolrServer = new MockSolrServer("qaevent"); + solr = mockSolrServer.getSolrServer(); + } + + /** Clear all records from the search core. */ + public void reset() { + mockSolrServer.reset(); + try { + mockSolrServer.getSolrServer().commit(); + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public void destroy() throws Exception { + mockSolrServer.destroy(); + } +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/qaevent/script/OpenaireEventsImportIT.java b/dspace-api/src/test/java/org/dspace/qaevent/script/OpenaireEventsImportIT.java new file mode 100644 index 000000000000..6bb979f48be8 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/qaevent/script/OpenaireEventsImportIT.java @@ -0,0 +1,488 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.qaevent.script; + +import static java.util.List.of; +import static org.dspace.content.QAEvent.OPENAIRE_SOURCE; +import static org.dspace.matcher.QAEventMatcher.pendingOpenaireEventWith; +import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.io.OutputStream; +import java.net.URL; + +import eu.dnetlib.broker.BrokerClient; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.launcher.ScriptLauncher; +import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.matcher.QASourceMatcher; +import org.dspace.matcher.QATopicMatcher; +import org.dspace.qaevent.service.OpenaireClientFactory; +import org.dspace.qaevent.service.QAEventService; +import org.dspace.qaevent.service.impl.OpenaireClientFactoryImpl; +import org.dspace.utils.DSpace; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Integration tests for {@link OpenaireEventsImport}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class OpenaireEventsImportIT extends AbstractIntegrationTestWithDatabase { + + private static final String BASE_JSON_DIR_PATH = "org/dspace/app/openaire-events/"; + + private static final String ORDER_FIELD = "topic"; + + private QAEventService qaEventService = new DSpace().getSingletonService(QAEventService.class); + + private Collection collection; + + private BrokerClient brokerClient = OpenaireClientFactory.getInstance().getBrokerClient(); + + private BrokerClient mockBrokerClient = mock(BrokerClient.class); + + @Before + public void setup() { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + + context.restoreAuthSystemState(); + + ((OpenaireClientFactoryImpl) OpenaireClientFactory.getInstance()).setBrokerClient(mockBrokerClient); + } + + @After + public void after() { + ((OpenaireClientFactoryImpl) OpenaireClientFactory.getInstance()).setBrokerClient(brokerClient); + } + + @Test + public void testWithoutParameters() throws Exception { + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + assertThat(handler.getInfoMessages(), empty()); + + Exception exception = handler.getException(); + assertThat(exception, instanceOf(IllegalArgumentException.class)); + assertThat(exception.getMessage(), is("One parameter between the location of the file and the email " + + "must be entered to proceed with the import.")); + + verifyNoInteractions(mockBrokerClient); + } + + @Test + public void 
testWithBothFileAndEmailParameters() throws Exception { + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-f", getFileLocation("events.json"), + "-e", "test@user.com" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + assertThat(handler.getInfoMessages(), empty()); + + Exception exception = handler.getException(); + assertThat(exception, instanceOf(IllegalArgumentException.class)); + assertThat(exception.getMessage(), is("Only one parameter between the location of the file and the email " + + "must be entered to proceed with the import.")); + + verifyNoInteractions(mockBrokerClient); + } + + @Test + @SuppressWarnings("unchecked") + public void testManyEventsImportFromFile() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item firstItem = createItem("Test item", "123456789/99998"); + Item secondItem = createItem("Test item 2", "123456789/99999"); + + context.restoreAuthSystemState(); + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-f", getFileLocation("events.json") }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), empty()); + assertThat(handler.getInfoMessages(), contains( + "Trying to read the QA events from the provided file", + "Found 5 events in the given file")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 5L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), containsInAnyOrder( + QATopicMatcher.with("ENRICH/MORE/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MORE/PID", 1L), + 
QATopicMatcher.with("ENRICH/MISSING/PID", 1L), + QATopicMatcher.with("ENRICH/MISSING/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MISSING/ABSTRACT", 1L))); + + String projectMessage = "{\"projects[0].acronym\":\"PAThs\",\"projects[0].code\":\"687567\"," + + "\"projects[0].funder\":\"EC\",\"projects[0].fundingProgram\":\"H2020\"," + + "\"projects[0].jurisdiction\":\"EU\"," + + "\"projects[0].openaireId\":\"40|corda__h2020::6e32f5eb912688f2424c68b851483ea4\"," + + "\"projects[0].title\":\"Tracking Papyrus and Parchment Paths\"}"; + + assertThat(qaEventService.findEventsByTopic("ENRICH/MORE/PROJECT"), contains( + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/99998", firstItem, + "Egypt, crossroad of translations and literary interweavings", projectMessage, + "ENRICH/MORE/PROJECT", 1.00d))); + + String abstractMessage = "{\"abstracts[0]\":\"Missing Abstract\"}"; + + assertThat(qaEventService.findEventsByTopic("ENRICH/MISSING/ABSTRACT"), contains( + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/99999", secondItem, "Test Publication", + abstractMessage, "ENRICH/MISSING/ABSTRACT", 1.00d))); + + verifyNoInteractions(mockBrokerClient); + + } + + @Test + public void testManyEventsImportFromFileWithUnknownHandle() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item item = createItem("Test item", "123456789/99999"); + + context.restoreAuthSystemState(); + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-f", getFileLocation("events.json") }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), + contains("An error occurs storing the event with id b4e09c71312cd7c397969f56c900823f: " + + "Skipped event b4e09c71312cd7c397969f56c900823f related to the oai record " + + 
"oai:www.openstarts.units.it:123456789/99998 as the record was not found", + "An error occurs storing the event with id d050d2c4399c6c6ccf27d52d479d26e4: " + + "Skipped event d050d2c4399c6c6ccf27d52d479d26e4 related to the oai record " + + "oai:www.openstarts.units.it:123456789/99998 as the record was not found")); + assertThat(handler.getInfoMessages(), contains( + "Trying to read the QA events from the provided file", + "Found 5 events in the given file")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 3L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), containsInAnyOrder( + QATopicMatcher.with("ENRICH/MISSING/ABSTRACT", 1L), + QATopicMatcher.with("ENRICH/MISSING/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MORE/PID", 1L) + )); + + String abstractMessage = "{\"abstracts[0]\":\"Missing Abstract\"}"; + + assertThat(qaEventService.findEventsByTopic("ENRICH/MISSING/ABSTRACT"), contains( + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/99999", item, "Test Publication", + abstractMessage, "ENRICH/MISSING/ABSTRACT", 1.00d))); + + verifyNoInteractions(mockBrokerClient); + + } + + @Test + public void testManyEventsImportFromFileWithUnknownTopic() throws Exception { + + context.turnOffAuthorisationSystem(); + + createItem("Test item", "123456789/99999"); + Item secondItem = createItem("Test item 2", "123456789/999991"); + + context.restoreAuthSystemState(); + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-f", getFileLocation("unknown-topic-events.json") }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), + contains("Event for topic ENRICH/MORE/UNKNOWN is not allowed in the qaevents.cfg")); + assertThat(handler.getInfoMessages(), contains( + "Trying to 
read the QA events from the provided file", + "Found 2 events in the given file")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 1L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), + contains(QATopicMatcher.with("ENRICH/MISSING/ABSTRACT", 1L))); + + String abstractMessage = "{\"abstracts[0]\":\"Missing Abstract\"}"; + + assertThat(qaEventService.findEventsByTopic("ENRICH/MISSING/ABSTRACT"), contains( + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/999991", secondItem, "Test Publication 2", + abstractMessage, "ENRICH/MISSING/ABSTRACT", 1.00d))); + + verifyNoInteractions(mockBrokerClient); + + } + + @Test + public void testImportFromFileWithoutEvents() throws Exception { + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-f", getFileLocation("empty-file.json") }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), + contains(containsString("A not recoverable error occurs during OPENAIRE events import"))); + assertThat(handler.getWarningMessages(),empty()); + assertThat(handler.getInfoMessages(), contains("Trying to read the QA events from the provided file")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 0L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), empty()); + + verifyNoInteractions(mockBrokerClient); + } + + @Test + @SuppressWarnings("unchecked") + public void testImportFromOpenaireBroker() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item firstItem = createItem("Test item", "123456789/99998"); + Item secondItem = createItem("Test item 2", "123456789/99999"); + Item thirdItem = createItem("Test item 3", "123456789/999991"); + + context.restoreAuthSystemState(); + + URL openaireURL = new 
URL("http://api.openaire.eu/broker"); + + when(mockBrokerClient.listSubscriptions(openaireURL, "user@test.com")).thenReturn(of("sub1", "sub2", "sub3")); + + doAnswer(i -> writeToOutputStream(i.getArgument(2, OutputStream.class), "events.json")) + .when(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub1"), any()); + + doAnswer(i -> writeToOutputStream(i.getArgument(2, OutputStream.class), "empty-events-list.json")) + .when(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub2"), any()); + + doAnswer(i -> writeToOutputStream(i.getArgument(2, OutputStream.class), "unknown-topic-events.json")) + .when(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub3"), any()); + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-e", "user@test.com" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), empty()); + assertThat(handler.getWarningMessages(), + contains("Event for topic ENRICH/MORE/UNKNOWN is not allowed in the qaevents.cfg")); + assertThat(handler.getInfoMessages(), contains( + "Trying to read the QA events from the OPENAIRE broker", + "Found 3 subscriptions related to the given email", + "Found 5 events from the subscription sub1", + "Found 0 events from the subscription sub2", + "Found 2 events from the subscription sub3")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 6L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), containsInAnyOrder( + QATopicMatcher.with("ENRICH/MORE/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MORE/PID", 1L), + QATopicMatcher.with("ENRICH/MISSING/PID", 1L), + QATopicMatcher.with("ENRICH/MISSING/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MISSING/ABSTRACT", 2L))); + + String projectMessage = "{\"projects[0].acronym\":\"PAThs\",\"projects[0].code\":\"687567\"," + + 
"\"projects[0].funder\":\"EC\",\"projects[0].fundingProgram\":\"H2020\"," + + "\"projects[0].jurisdiction\":\"EU\"," + + "\"projects[0].openaireId\":\"40|corda__h2020::6e32f5eb912688f2424c68b851483ea4\"," + + "\"projects[0].title\":\"Tracking Papyrus and Parchment Paths\"}"; + + assertThat(qaEventService.findEventsByTopic("ENRICH/MORE/PROJECT"), contains( + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/99998", firstItem, + "Egypt, crossroad of translations and literary interweavings", projectMessage, + "ENRICH/MORE/PROJECT", 1.00d))); + + String abstractMessage = "{\"abstracts[0]\":\"Missing Abstract\"}"; + + assertThat(qaEventService.findEventsByTopic("ENRICH/MISSING/ABSTRACT"), containsInAnyOrder( + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/99999", secondItem, "Test Publication", + abstractMessage, "ENRICH/MISSING/ABSTRACT", 1.00d), + pendingOpenaireEventWith("oai:www.openstarts.units.it:123456789/999991", thirdItem, "Test Publication 2", + abstractMessage, "ENRICH/MISSING/ABSTRACT", 1.00d))); + + verify(mockBrokerClient).listSubscriptions(openaireURL, "user@test.com"); + verify(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub1"), any()); + verify(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub2"), any()); + verify(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub3"), any()); + + verifyNoMoreInteractions(mockBrokerClient); + } + + @Test + public void testImportFromOpenaireBrokerWithErrorDuringListSubscription() throws Exception { + + URL openaireURL = new URL("http://api.openaire.eu/broker"); + + when(mockBrokerClient.listSubscriptions(openaireURL, "user@test.com")) + .thenThrow(new RuntimeException("Connection refused")); + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-e", "user@test.com" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + 
assertThat(handler.getErrorMessages(), + contains("A not recoverable error occurs during OPENAIRE events import: Connection refused")); + assertThat(handler.getWarningMessages(), empty()); + assertThat(handler.getInfoMessages(), contains("Trying to read the QA events from the OPENAIRE broker")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 0L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), empty()); + + verify(mockBrokerClient).listSubscriptions(openaireURL, "user@test.com"); + + verifyNoMoreInteractions(mockBrokerClient); + + } + + @Test + @SuppressWarnings("unchecked") + public void testImportFromOpenaireBrokerWithErrorDuringEventsDownload() throws Exception { + + context.turnOffAuthorisationSystem(); + + createItem("Test item", "123456789/99998"); + createItem("Test item 2", "123456789/99999"); + createItem("Test item 3", "123456789/999991"); + + context.restoreAuthSystemState(); + + URL openaireURL = new URL("http://api.openaire.eu/broker"); + + when(mockBrokerClient.listSubscriptions(openaireURL, "user@test.com")).thenReturn(of("sub1", "sub2", "sub3")); + + doAnswer(i -> writeToOutputStream(i.getArgument(2, OutputStream.class), "events.json")) + .when(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub1"), any()); + + doThrow(new RuntimeException("Invalid subscription id")) + .when(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub2"), any()); + + doAnswer(i -> writeToOutputStream(i.getArgument(2, OutputStream.class), "unknown-topic-events.json")) + .when(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub3"), any()); + + TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler(); + + String[] args = new String[] { "import-openaire-events", "-e", "user@test.com" }; + ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl); + + assertThat(handler.getErrorMessages(), contains("An error occurs downloading the events " 
+ + "related to the subscription sub2: Invalid subscription id")); + assertThat(handler.getWarningMessages(), + contains("Event for topic ENRICH/MORE/UNKNOWN is not allowed in the qaevents.cfg")); + assertThat(handler.getInfoMessages(), contains( + "Trying to read the QA events from the OPENAIRE broker", + "Found 3 subscriptions related to the given email", + "Found 5 events from the subscription sub1", + "Found 0 events from the subscription sub2", + "Found 2 events from the subscription sub3")); + + assertThat(qaEventService.findAllSources(0, 20), contains(QASourceMatcher.with(OPENAIRE_SOURCE, 6L))); + + assertThat(qaEventService.findAllTopics(0, 20, ORDER_FIELD, false), containsInAnyOrder( + QATopicMatcher.with("ENRICH/MORE/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MISSING/PID", 1L), + QATopicMatcher.with("ENRICH/MORE/PID", 1L), + QATopicMatcher.with("ENRICH/MISSING/PROJECT", 1L), + QATopicMatcher.with("ENRICH/MISSING/ABSTRACT", 2L))); + + assertThat(qaEventService.findEventsByTopic("ENRICH/MORE/PROJECT"), hasSize(1)); + assertThat(qaEventService.findEventsByTopic("ENRICH/MISSING/ABSTRACT"), hasSize(2)); + + verify(mockBrokerClient).listSubscriptions(openaireURL, "user@test.com"); + verify(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub1"), any()); + verify(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub2"), any()); + verify(mockBrokerClient).downloadEvents(eq(openaireURL), eq("sub3"), any()); + + verifyNoMoreInteractions(mockBrokerClient); + + } + + private Item createItem(String title, String handle) { + return ItemBuilder.createItem(context, collection) + .withTitle(title) + .withHandle(handle) + .build(); + } + + private Void writeToOutputStream(OutputStream outputStream, String fileName) { + try { + byte[] fileContent = getFileContent(fileName); + IOUtils.write(fileContent, outputStream); + return null; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private byte[] getFileContent(String fileName) throws 
Exception { + String fileLocation = getFileLocation(fileName); + try (FileInputStream fis = new FileInputStream(new File(fileLocation))) { + return IOUtils.toByteArray(fis); + } + } + + private String getFileLocation(String fileName) throws Exception { + URL resource = getClass().getClassLoader().getResource(BASE_JSON_DIR_PATH + fileName); + if (resource == null) { + throw new IllegalStateException("No resource found named " + BASE_JSON_DIR_PATH + fileName); + } + return new File(resource.getFile()).getAbsolutePath(); + } +} diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index 1197370e32f8..632b4e2f83f4 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,24 +31,13 @@ public void setDspaceRunnableClass(Class dspaceRunnableClass) { this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == 
null) { Options options = new Options(); options.addOption("r", "remove", true, "description r"); - options.getOption("r").setType(String.class); options.addOption("i", "index", false, "description i"); - options.getOption("i").setType(boolean.class); options.getOption("i").setRequired(true); options.addOption("f", "file", true, "source file"); options.getOption("f").setType(InputStream.class); diff --git a/dspace-api/src/test/java/org/dspace/service/impl/ClientInfoServiceImplTest.java b/dspace-api/src/test/java/org/dspace/service/impl/ClientInfoServiceImplTest.java index d46aded5ac82..a883176c12f0 100644 --- a/dspace-api/src/test/java/org/dspace/service/impl/ClientInfoServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/service/impl/ClientInfoServiceImplTest.java @@ -223,4 +223,40 @@ public void isUseProxiesEnabledFalse() { assertFalse(clientInfoService.isUseProxiesEnabled()); } + + @Test + public void testIpAnonymization() { + clientInfoService = new ClientInfoServiceImpl(configurationService); + + String remoteIp = "192.168.1.25"; + + assertEquals("192.168.1.25", clientInfoService.getClientIp(remoteIp, null)); + + try { + + configurationService.setProperty("client.ip-anonymization.parts", 1); + + assertEquals("192.168.1.0", clientInfoService.getClientIp(remoteIp, null)); + + configurationService.setProperty("client.ip-anonymization.parts", 2); + + assertEquals("192.168.0.0", clientInfoService.getClientIp(remoteIp, null)); + + configurationService.setProperty("client.ip-anonymization.parts", 3); + + assertEquals("192.0.0.0", clientInfoService.getClientIp(remoteIp, null)); + + configurationService.setProperty("client.ip-anonymization.parts", 4); + + assertEquals("0.0.0.0", clientInfoService.getClientIp(remoteIp, null)); + + configurationService.setProperty("client.ip-anonymization.parts", 5); + + assertEquals("192.168.1.25", clientInfoService.getClientIp(remoteIp, null)); + + } finally { + configurationService.setProperty("client.ip-anonymization.parts", 
0); + } + + } } diff --git a/dspace-api/src/test/java/org/dspace/service/impl/HttpConnectionPoolServiceTest.java b/dspace-api/src/test/java/org/dspace/service/impl/HttpConnectionPoolServiceTest.java new file mode 100644 index 000000000000..60964cd004f1 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/service/impl/HttpConnectionPoolServiceTest.java @@ -0,0 +1,96 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.service.impl; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; + +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpUriRequest; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.AbstractDSpaceTest; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.eclipse.jetty.http.HttpStatus; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.mockserver.client.MockServerClient; +import org.mockserver.junit.MockServerRule; + +/** + * + * @author Mark H. 
Wood + */ +public class HttpConnectionPoolServiceTest + extends AbstractDSpaceTest { + private static ConfigurationService configurationService; + + @Rule + public MockServerRule mockServerRule = new MockServerRule(this); + + private MockServerClient mockServerClient; + + @BeforeClass + public static void initClass() { + configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); + } + + /** + * Test of getClient method, of class HttpConnectionPoolService. + * @throws java.io.IOException if a connection cannot be closed. + * @throws java.net.URISyntaxException when an invalid URI is constructed. + */ + @Test + public void testGetClient() + throws IOException, URISyntaxException { + System.out.println("getClient"); + + configurationService.setProperty("solr.client.maxTotalConnections", 2); + configurationService.setProperty("solr.client.maxPerRoute", 2); + HttpConnectionPoolService instance = new HttpConnectionPoolService("solr"); + instance.configurationService = configurationService; + instance.init(); + + final String testPath = "/test"; + mockServerClient.when( + request() + .withPath(testPath) + ).respond( + response() + .withStatusCode(HttpStatus.OK_200) + ); + + try (CloseableHttpClient httpClient = instance.getClient()) { + assertNotNull("getClient should always return a client", httpClient); + + URI uri = new URIBuilder() + .setScheme("http") + .setHost("localhost") + .setPort(mockServerClient.getPort()) + .setPath(testPath) + .build(); + System.out.println(uri.toString()); + HttpUriRequest request = RequestBuilder.get(uri) + .build(); + try (CloseableHttpResponse response = httpClient.execute(request)) { + assertEquals("Response status should be OK", HttpStatus.OK_200, + response.getStatusLine().getStatusCode()); + } + } + } +} diff --git a/dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java b/dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java index e80d5f8e1750..aed0c088c362 100644 --- 
a/dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java +++ b/dspace-api/src/test/java/org/dspace/solr/MockSolrServer.java @@ -171,6 +171,7 @@ private static synchronized void initSolrContainer() { * Discard the embedded Solr container. */ private static synchronized void destroyContainer() { + container.shutdown(); container = null; log.info("SOLR CoreContainer destroyed"); } diff --git a/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java b/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java index 7cb20c23d17e..782588d93df0 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java +++ b/dspace-api/src/test/java/org/dspace/statistics/MockSolrLoggerServiceImpl.java @@ -9,7 +9,6 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import java.net.InetAddress; import java.util.ArrayList; @@ -29,6 +28,7 @@ import com.maxmind.geoip2.record.RepresentedCountry; import com.maxmind.geoip2.record.Traits; import org.dspace.solr.MockSolrServer; +import org.mockito.Mockito; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.stereotype.Service; @@ -55,7 +55,7 @@ public void afterPropertiesSet() throws Exception { // Mock GeoIP's DatabaseReader DatabaseReader reader = mock(DatabaseReader.class); // Ensure that any tests requesting a city() get a mock/fake CityResponse - when(reader.city(any(InetAddress.class))).thenReturn(mockCityResponse()); + Mockito.lenient().when(reader.city(any(InetAddress.class))).thenReturn(mockCityResponse()); // Save this mock DatabaseReader to be used by SolrLoggerService locationService = reader; } diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java b/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java 
index db27837e9b10..e28e8284a218 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java +++ b/dspace-api/src/test/java/org/dspace/statistics/export/ITIrusExportUsageEventListener.java @@ -68,7 +68,6 @@ public class ITIrusExportUsageEventListener extends AbstractIntegrationTestWithD private static final Logger log = LogManager.getLogger(); - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); @@ -87,7 +86,7 @@ public class ITIrusExportUsageEventListener extends AbstractIntegrationTestWithD .getServiceByName("testProcessedUrls", ArrayList.class); - private IrusExportUsageEventListener exportUsageEventListener = + private final IrusExportUsageEventListener exportUsageEventListener = DSpaceServicesFactory.getInstance() .getServiceManager() .getServicesByType(IrusExportUsageEventListener.class) @@ -106,9 +105,11 @@ public class ITIrusExportUsageEventListener extends AbstractIntegrationTestWithD /** - * Initializes the test by setting up all objects needed to create a test item + * Initializes the test by setting up all objects needed to create a test item. + * @throws java.lang.Exception passed through. 
*/ @Before() + @Override public void setUp() throws Exception { super.setUp(); @@ -122,16 +123,16 @@ public void setUp() throws Exception { entityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build(); community = CommunityBuilder.createCommunity(context).build(); - collection = CollectionBuilder.createCollection(context, community).build(); + collection = CollectionBuilder.createCollection(context, community) + .withEntityType(entityType.getLabel()) + .build(); item = ItemBuilder.createItem(context, collection) - .withEntityType(entityType.getLabel()) .build(); File f = new File(testProps.get("test.bitstream").toString()); bitstream = BitstreamBuilder.createBitstream(context, item, new FileInputStream(f)).build(); itemNotToBeProcessed = ItemBuilder.createItem(context, collection) - .withEntityType(entityType.getLabel()) .withType("Excluded type") .build(); File itemNotToBeProcessedFile = new File(testProps.get("test.bitstream").toString()); @@ -153,11 +154,12 @@ public void setUp() throws Exception { } /** - * Clean up the created objects - * Empty the testProcessedUrls used to store succeeded urls - * Empty the database table where the failed urls are logged + * Clean up the created objects. + * Empty the testProcessedUrls used to store succeeded URLs. + * Empty the database table where the failed URLs are logged. */ @After + @Override public void destroy() throws Exception { try { context.turnOffAuthorisationSystem(); @@ -378,11 +380,13 @@ public void testReceiveEventOnBitstreamThatShouldNotBeProcessed() throws SQLExce /** * Test that an object that is not an Item or Bitstream is not processed + * @throws java.sql.SQLException passed through. 
*/ @Test + @SuppressWarnings("ResultOfMethodCallIgnored") public void testReceiveEventOnNonRelevantObject() throws SQLException { - HttpServletRequest request = mock(HttpServletRequest.class); + mock(HttpServletRequest.class); UsageEvent usageEvent = mock(UsageEvent.class); when(usageEvent.getObject()).thenReturn(community); @@ -395,7 +399,6 @@ public void testReceiveEventOnNonRelevantObject() throws SQLException { assertEquals(0, all.size()); assertEquals(0, testProcessedUrls.size()); - } /** @@ -409,11 +412,6 @@ private boolean matchesString(String string, String regex) { Pattern p = Pattern.compile(regex); - if (p.matcher(string).matches()) { - return true; - } - return false; + return p.matcher(string).matches(); } - - } diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorIT.java b/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorIT.java index e4cb59fe6102..a690b1a1c6ef 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorIT.java +++ b/dspace-api/src/test/java/org/dspace/statistics/export/processor/BitstreamEventProcessorIT.java @@ -37,13 +37,13 @@ */ public class BitstreamEventProcessorIT extends AbstractIntegrationTestWithDatabase { - private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - + private final ConfigurationService configurationService + = DSpaceServicesFactory.getInstance().getConfigurationService(); private String encodedUrl; - @Before + @Override public void setUp() throws Exception { super.setUp(); configurationService.setProperty("irus.statistics.tracker.enabled", true); diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorIT.java b/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorIT.java index 44848d579b80..e42003e4fc8b 100644 --- 
a/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorIT.java +++ b/dspace-api/src/test/java/org/dspace/statistics/export/processor/ExportEventProcessorIT.java @@ -134,8 +134,10 @@ public void testShouldProcessItemWhenNotArchived() throws SQLException { public void testShouldProcessItemWhenCanEdit() throws SQLException { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); - Item item = ItemBuilder.createItem(context, collection).withEntityType(otherEntity.getLabel()).build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withEntityType(otherEntity.getLabel()) + .build(); + Item item = ItemBuilder.createItem(context, collection).build(); context.restoreAuthSystemState(); context.setCurrentUser(admin); @@ -154,10 +156,11 @@ public void testShouldProcessItemWhenShouldNotProcessType() throws Exception { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withEntityType(publication.getLabel()) + .build(); Item item = ItemBuilder.createItem(context, collection) .withType("Excluded type") - .withEntityType(publication.getLabel()) .build(); context.restoreAuthSystemState(); @@ -176,8 +179,10 @@ public void testShouldProcessItemWhenShouldNotProcessType() throws Exception { public void testShouldProcessItemWhenShouldNotProcessEntity() throws SQLException { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); - Item item = ItemBuilder.createItem(context, 
collection).withEntityType(otherEntity.getLabel()).build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withEntityType(otherEntity.getLabel()) + .build(); + Item item = ItemBuilder.createItem(context, collection).build(); context.restoreAuthSystemState(); ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); @@ -194,8 +199,10 @@ public void testShouldProcessItemWhenShouldNotProcessEntity() throws SQLExceptio public void testShouldProcessItem() throws SQLException { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); - Item item = ItemBuilder.createItem(context, collection).withEntityType(publication.getLabel()).build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withEntityType(publication.getLabel()) + .build(); + Item item = ItemBuilder.createItem(context, collection).build(); context.restoreAuthSystemState(); ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); @@ -213,8 +220,10 @@ public void testShouldProcessItem() throws SQLException { public void testShouldProcessEntityType() throws SQLException { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); - Item item = ItemBuilder.createItem(context, collection).withEntityType(publication.getLabel()).build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withEntityType(publication.getLabel()) + .build(); + Item item = ItemBuilder.createItem(context, collection).build(); context.restoreAuthSystemState(); ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); @@ -231,8 +240,10 @@ public void 
testShouldProcessEntityType() throws SQLException { public void testShouldProcessEntityTypeWhenNotInList() throws SQLException { context.turnOffAuthorisationSystem(); Community community = CommunityBuilder.createCommunity(context).build(); - Collection collection = CollectionBuilder.createCollection(context, community).build(); - Item item = ItemBuilder.createItem(context, collection).withEntityType(otherEntity.getLabel()).build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withEntityType(otherEntity.getLabel()) + .build(); + Item item = ItemBuilder.createItem(context, collection).build(); context.restoreAuthSystemState(); ExportEventProcessor exportEventProcessor = new ItemEventProcessor(context, request, item); diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java b/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java index 14ac9d36d5c7..a5fbfd029be1 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java +++ b/dspace-api/src/test/java/org/dspace/statistics/export/service/MockOpenUrlServiceImpl.java @@ -15,7 +15,8 @@ import org.springframework.beans.factory.annotation.Autowired; /** - * Mock OpenUrlService that will ensure that IRUS tracker does need to be contacted in order to test the functionality + * Mock OpenUrlService that will ensure that IRUS tracker does need to be + * contacted in order to test the functionality. */ public class MockOpenUrlServiceImpl extends OpenUrlServiceImpl { @@ -23,13 +24,14 @@ public class MockOpenUrlServiceImpl extends OpenUrlServiceImpl { ArrayList testProcessedUrls; /** - * Returns a response code to simulate contact to the external url - * When the url contains "fail", a fail code 500 will be returned - * Otherwise the success code 200 will be returned + * Returns a response code to simulate contact to the external URL. 
+ * When the URL contains "fail", a fail code 500 will be returned. + * Otherwise the success code 200 will be returned. * @param urlStr - * @return 200 or 500 depending on whether the "fail" keyword is present in the url + * @return 200 or 500 depending on whether the "fail" keyword is present in the URL. * @throws IOException */ + @Override protected int getResponseCodeFromUrl(final String urlStr) throws IOException { if (StringUtils.contains(urlStr, "fail")) { return HttpURLConnection.HTTP_INTERNAL_ERROR; diff --git a/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java b/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java index a67d0355d362..d214050e6b5a 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/statistics/export/service/OpenUrlServiceImplTest.java @@ -9,9 +9,10 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.closeTo; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.doCallRealMethod; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -20,18 +21,23 @@ import static org.mockito.Mockito.when; import java.io.IOException; +import java.math.BigDecimal; import java.net.HttpURLConnection; import java.sql.SQLException; -import java.util.ArrayList; +import java.util.Date; import java.util.List; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.client.HttpClient; import org.dspace.core.Context; import org.dspace.statistics.export.OpenURLTracker; +import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; -import 
org.mockito.InjectMocks; +import org.mockito.ArgumentCaptor; import org.mockito.Mock; -import org.mockito.Spy; +import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; /** @@ -40,95 +46,155 @@ @RunWith(MockitoJUnitRunner.class) public class OpenUrlServiceImplTest { - @InjectMocks - @Spy + /** + * NOTE: Initialized as a Mockito spy in {@link #setUp()}. + */ private OpenUrlServiceImpl openUrlService; @Mock private FailedOpenURLTrackerService failedOpenURLTrackerService; + @Mock + private HttpClient httpClient; + + @Before + public void setUp() throws Exception { + // spy on the class under test + openUrlService = Mockito.spy(OpenUrlServiceImpl.class); + + // manually hook up dependencies (@autowire doesn't work when creating instances using Mockito) + openUrlService.failedOpenUrlTrackerService = failedOpenURLTrackerService; + + // IMPORTANT: mock http client to prevent making REAL http requests + doReturn(httpClient).when(openUrlService).getHttpClient(any()); + } + + /** + * Create a mock http response with the given status code. + * @param statusCode the http status code to use in the mock. + * @return a mocked http response. + */ + protected HttpResponse createMockHttpResponse(int statusCode) { + StatusLine statusLine = mock(StatusLine.class); + when(statusLine.getStatusCode()).thenReturn(statusCode); + + HttpResponse httpResponse = mock(HttpResponse.class); + when(httpResponse.getStatusLine()).thenReturn(statusLine); + + return httpResponse; + } + + /** + * Create a mock open url tracker with the given url. + * @param url the url to use in the mock. + * @return a mocked open url tracker. 
+ */ + protected OpenURLTracker createMockTracker(String url) { + OpenURLTracker tracker = mock(OpenURLTracker.class); + when(tracker.getUrl()).thenReturn(url); + + return tracker; + } + /** * Test the processUrl method - * @throws IOException - * @throws SQLException */ @Test public void testProcessUrl() throws IOException, SQLException { Context context = mock(Context.class); - doReturn(HttpURLConnection.HTTP_OK).when(openUrlService) - .getResponseCodeFromUrl(anyString()); + doReturn(createMockHttpResponse(HttpURLConnection.HTTP_OK)).when(httpClient).execute(any()); openUrlService.processUrl(context, "test-url"); verify(openUrlService, times(0)).logfailed(context, "test-url"); - - } /** * Test the processUrl method when the url connection fails - * @throws IOException - * @throws SQLException */ @Test public void testProcessUrlOnFail() throws IOException, SQLException { Context context = mock(Context.class); - doReturn(HttpURLConnection.HTTP_INTERNAL_ERROR).when(openUrlService) - .getResponseCodeFromUrl(anyString()); + doReturn(createMockHttpResponse(HttpURLConnection.HTTP_INTERNAL_ERROR)).when(httpClient).execute(any()); doNothing().when(openUrlService).logfailed(any(Context.class), anyString()); openUrlService.processUrl(context, "test-url"); verify(openUrlService, times(1)).logfailed(context, "test-url"); - - } /** * Test the ReprocessFailedQueue method - * @throws SQLException */ @Test - public void testReprocessFailedQueue() throws SQLException { + public void testReprocessFailedQueue() throws IOException, SQLException { Context context = mock(Context.class); - List trackers = new ArrayList<>(); - OpenURLTracker tracker1 = mock(OpenURLTracker.class); - OpenURLTracker tracker2 = mock(OpenURLTracker.class); - OpenURLTracker tracker3 = mock(OpenURLTracker.class); - - trackers.add(tracker1); - trackers.add(tracker2); - trackers.add(tracker3); + List trackers = List.of( + createMockTracker("tacker1"), + createMockTracker("tacker2"), + createMockTracker("tacker3") 
+ ); when(failedOpenURLTrackerService.findAll(any(Context.class))).thenReturn(trackers); - doNothing().when(openUrlService).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class)); + + // NOTE: first http request will return status code 500, next one 404, then 200 + doReturn( + createMockHttpResponse(HttpURLConnection.HTTP_INTERNAL_ERROR), + createMockHttpResponse(HttpURLConnection.HTTP_NOT_FOUND), + createMockHttpResponse(HttpURLConnection.HTTP_OK) + ).when(httpClient).execute(any()); openUrlService.reprocessFailedQueue(context); verify(openUrlService, times(3)).tryReprocessFailed(any(Context.class), any(OpenURLTracker.class)); + // NOTE: http request for tracker 1 and 2 failed, so tracker 1 and 2 should be kept + // http request for tracker 3 succeeded, so tracker 3 should be removed + verify(failedOpenURLTrackerService, times(0)).remove(any(Context.class), eq(trackers.get(0))); + verify(failedOpenURLTrackerService, times(0)).remove(any(Context.class), eq(trackers.get(1))); + verify(failedOpenURLTrackerService, times(1)).remove(any(Context.class), eq(trackers.get(2))); } /** * Test the method that logs the failed urls in the db - * @throws SQLException */ @Test public void testLogfailed() throws SQLException { Context context = mock(Context.class); OpenURLTracker tracker1 = mock(OpenURLTracker.class); - doCallRealMethod().when(tracker1).setUrl(anyString()); - when(tracker1.getUrl()).thenCallRealMethod(); - when(failedOpenURLTrackerService.create(any(Context.class))).thenReturn(tracker1); String failedUrl = "failed-url"; openUrlService.logfailed(context, failedUrl); - assertThat(tracker1.getUrl(), is(failedUrl)); + verify(tracker1).setUrl(failedUrl); + + // NOTE: verify that setUploadDate received a timestamp whose value is no less than 5 seconds from now + ArgumentCaptor dateArgCaptor = ArgumentCaptor.forClass(Date.class); + verify(tracker1).setUploadDate(dateArgCaptor.capture()); + assertThat( + new BigDecimal(dateArgCaptor.getValue().getTime()), + 
closeTo(new BigDecimal(new Date().getTime()), new BigDecimal(5000)) + ); + } + + /** + * Tests whether the timeout gets set to 10 seconds when processing a url + */ + @Test + public void testTimeout() throws IOException, SQLException { + Context context = mock(Context.class); + + // 1. verify processUrl calls getHttpClient and getHttpClientRequestConfig once + doReturn(createMockHttpResponse(HttpURLConnection.HTTP_OK)).when(httpClient).execute(any()); + openUrlService.processUrl(context, "test-url"); + verify(openUrlService).getHttpClient(any()); + verify(openUrlService).getHttpClientRequestConfig(); + // 2. verify that getHttpClientRequestConfig sets the timeout + assertThat(openUrlService.getHttpClientRequestConfig().getConnectTimeout(), is(10 * 1000)); } } diff --git a/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java b/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java index 320cc55a0d12..1dbbdb6cd0a7 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java +++ b/dspace-api/src/test/java/org/dspace/statistics/util/IPTableTest.java @@ -56,14 +56,15 @@ public void testAdd() throws Exception { IPTable instance = new IPTable(); // Add IP address instance.add(LOCALHOST); - // Add IP range + // Add IP range (contains 256 addresses) instance.add("192.168.1"); - // Make sure both exist + // Make sure it returns the addresses for all ranges Set ipSet = instance.toSet(); - assertEquals(2, ipSet.size()); + assertEquals(257, ipSet.size()); assertTrue(ipSet.contains(LOCALHOST)); - assertTrue(ipSet.contains("192.168.1")); + assertTrue(ipSet.contains("192.168.1.0")); + assertTrue(ipSet.contains("192.168.1.255")); } @Test @@ -76,13 +77,13 @@ public void testAddSameIPTwice() throws Exception { assertEquals(1, instance.toSet().size()); instance = new IPTable(); - // Add IP range & then add an IP from within that range + // Add IP range w/ 256 addresses & then add an IP from within that range instance.add("192.168.1"); 
instance.add("192.168.1.1"); // Verify only the range exists Set ipSet = instance.toSet(); - assertEquals(1, ipSet.size()); - assertTrue(ipSet.contains("192.168.1")); + assertEquals(256, ipSet.size()); + assertTrue(ipSet.contains("192.168.1.1")); instance = new IPTable(); // Now, switch order. Add IP address, then add a range encompassing that IP @@ -90,8 +91,8 @@ public void testAddSameIPTwice() throws Exception { instance.add("192.168.1"); // Verify only the range exists ipSet = instance.toSet(); - assertEquals(1, ipSet.size()); - assertTrue(ipSet.contains("192.168.1")); + assertEquals(256, ipSet.size()); + assertTrue(ipSet.contains("192.168.1.1")); } /** @@ -120,6 +121,48 @@ public void testContains() assertTrue("IP within an add()ed range should match", contains); } + @Test + public void testDashRangeContains() throws Exception { + IPTable instance = new IPTable(); + instance.add("192.168.0.0 - 192.168.0.245"); + + assertTrue("Range should contain lower limit", instance.contains("192.168.0.0")); + assertTrue("Range should contain upper limit", instance.contains("192.168.0.245")); + assertTrue("Range should contain value in between limits", instance.contains("192.168.0.123")); + assertTrue("Range should contain value in between limits", instance.contains("192.168.0.234")); + + assertFalse("Range should not contain value below lower limit", instance.contains("192.167.255.255")); + assertFalse("Range should not contain value above upper limit", instance.contains("192.168.0.246")); + } + + @Test + public void testSubnetRangeContains() throws Exception { + IPTable instance = new IPTable(); + instance.add("192.168.0.0/30"); // translates to 192.168.0.0 - 192.168.0.3 + + assertTrue("Range should contain lower limit", instance.contains("192.168.0.0")); + assertTrue("Range should contain upper limit", instance.contains("192.168.0.3")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.0.1")); + assertTrue("Range should contain 
values in between limits", instance.contains("192.168.0.2")); + + assertFalse("Range should not contain value below lower limit", instance.contains("192.167.255.255")); + assertFalse("Range should not contain value above upper limit", instance.contains("192.168.0.4")); + } + + @Test + public void testImplicitRangeContains() throws Exception { + IPTable instance = new IPTable(); + instance.add("192.168.1"); + + assertTrue("Range should contain lower limit", instance.contains("192.168.1.0")); + assertTrue("Range should contain upper limit", instance.contains("192.168.1.255")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.1.123")); + assertTrue("Range should contain values in between limits", instance.contains("192.168.1.234")); + + assertFalse("Range should not contain value below lower limit", instance.contains("192.168.0.0")); + assertFalse("Range should not contain value above upper limit", instance.contains("192.168.2.0")); + } + /** * Test of isEmpty method, of class IPTable. * @throws java.lang.Exception passed through. 
diff --git a/dspace-api/src/test/java/org/dspace/statistics/util/SpiderDetectorServiceImplTest.java b/dspace-api/src/test/java/org/dspace/statistics/util/SpiderDetectorServiceImplTest.java index 039fe31f11bb..24f8c0f124be 100644 --- a/dspace-api/src/test/java/org/dspace/statistics/util/SpiderDetectorServiceImplTest.java +++ b/dspace-api/src/test/java/org/dspace/statistics/util/SpiderDetectorServiceImplTest.java @@ -70,7 +70,7 @@ public void testCaseInsensitiveMatching() throws Exception { req.setAgent("msnboT Is WaTching you"); assertTrue("'msnbot' didn't match pattern", spiderDetectorService.isSpider(req)); - req.setAgent("FirefOx"); + req.setAgent("mozilla/5.0 (x11; linux x86_64; rv:91.0) gecko/20100101 firefox/91.0"); assertFalse("'Firefox' matched a pattern", spiderDetectorService.isSpider(req)); // Test IP patterns diff --git a/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java new file mode 100644 index 000000000000..7aae1cf2719c --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/storage/bitstore/S3BitStoreServiceIT.java @@ -0,0 +1,434 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.storage.bitstore; + +import static com.amazonaws.regions.Regions.DEFAULT_REGION; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.dspace.storage.bitstore.S3BitStoreService.CSA; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static 
org.hamcrest.Matchers.startsWith; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.sql.SQLException; +import java.util.List; +import java.util.Map; + +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.AnonymousAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; +import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.Bucket; +import com.amazonaws.services.s3.model.ObjectMetadata; +import io.findify.s3mock.S3Mock; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.app.matcher.LambdaMatcher; +import org.dspace.authorize.AuthorizeException; +import org.dspace.builder.BitstreamBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Bitstream; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.core.Utils; +import org.hamcrest.Matcher; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + + +/** + * @author Luca Giamminonni (luca.giamminonni at 4science.com) + */ +public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase { + + private static final String DEFAULT_BUCKET_NAME = "dspace-asset-localhost"; + + private S3BitStoreService s3BitStoreService; + + private AmazonS3 amazonS3Client; + + private S3Mock s3Mock; + + private Collection collection; + + private File 
s3Directory; + + @Before + public void setup() throws Exception { + + s3Directory = new File(System.getProperty("java.io.tmpdir"), "s3"); + + s3Mock = S3Mock.create(8001, s3Directory.getAbsolutePath()); + s3Mock.start(); + + amazonS3Client = createAmazonS3Client(); + + s3BitStoreService = new S3BitStoreService(amazonS3Client); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + + collection = CollectionBuilder.createCollection(context, parentCommunity) + .build(); + + context.restoreAuthSystemState(); + } + + @After + public void cleanUp() throws IOException { + FileUtils.deleteDirectory(s3Directory); + s3Mock.shutdown(); + } + + @Test + public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException { + + String bucketName = "testbucket"; + + amazonS3Client.createBucket(bucketName); + + s3BitStoreService.setBucketName(bucketName); + s3BitStoreService.init(); + + assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(bucketName))); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + String expectedChecksum = Utils.toHex(generateChecksum(content)); + + assertThat(bitstream.getSizeBytes(), is((long) content.length())); + assertThat(bitstream.getChecksum(), is(expectedChecksum)); + assertThat(bitstream.getChecksumAlgorithm(), is(CSA)); + + InputStream inputStream = s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, UTF_8), is(content)); + + String key = s3BitStoreService.getFullKey(bitstream.getInternalId()); + ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(bucketName, key); + assertThat(objectMetadata.getContentMD5(), is(expectedChecksum)); + + } + + @Test + public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException { + + 
s3BitStoreService.init(); + + assertThat(s3BitStoreService.getBucketName(), is(DEFAULT_BUCKET_NAME)); + + assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(DEFAULT_BUCKET_NAME))); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + String expectedChecksum = Utils.toHex(generateChecksum(content)); + + assertThat(bitstream.getSizeBytes(), is((long) content.length())); + assertThat(bitstream.getChecksum(), is(expectedChecksum)); + assertThat(bitstream.getChecksumAlgorithm(), is(CSA)); + + InputStream inputStream = s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, UTF_8), is(content)); + + String key = s3BitStoreService.getFullKey(bitstream.getInternalId()); + ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key); + assertThat(objectMetadata.getContentMD5(), is(expectedChecksum)); + + } + + @Test + public void testBitstreamPutAndGetWithSubFolder() throws IOException { + + s3BitStoreService.setSubfolder("test/DSpace7/"); + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + InputStream inputStream = s3BitStoreService.get(bitstream); + assertThat(IOUtils.toString(inputStream, UTF_8), is(content)); + + String key = s3BitStoreService.getFullKey(bitstream.getInternalId()); + assertThat(key, startsWith("test/DSpace7/")); + + ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key); + assertThat(objectMetadata, notNullValue()); + + } + + @Test + public void testBitstreamDeletion() throws IOException { + + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + 
String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + assertThat(s3BitStoreService.get(bitstream), notNullValue()); + + s3BitStoreService.remove(bitstream); + + IOException exception = assertThrows(IOException.class, () -> s3BitStoreService.get(bitstream)); + assertThat(exception.getCause(), instanceOf(AmazonS3Exception.class)); + assertThat(((AmazonS3Exception) exception.getCause()).getStatusCode(), is(404)); + + } + + @Test + public void testAbout() throws IOException { + + s3BitStoreService.init(); + + context.turnOffAuthorisationSystem(); + String content = "Test bitstream content"; + Bitstream bitstream = createBitstream(content); + context.restoreAuthSystemState(); + + s3BitStoreService.put(bitstream, toInputStream(content)); + + Map about = s3BitStoreService.about(bitstream, List.of()); + assertThat(about.size(), is(0)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about.size(), is(1)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about.size(), is(2)); + + String expectedChecksum = Utils.toHex(generateChecksum(content)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about, hasEntry("checksum", expectedChecksum)); + assertThat(about.size(), is(3)); + + about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum", "checksum_algorithm")); + assertThat(about, hasEntry("size_bytes", 22L)); + assertThat(about, hasEntry(is("modified"), notNullValue())); + assertThat(about, 
hasEntry("checksum", expectedChecksum)); + assertThat(about, hasEntry("checksum_algorithm", CSA)); + assertThat(about.size(), is(4)); + + } + + @Test + public void handleRegisteredIdentifierPrefixInS3() { + String trueBitStreamId = "012345"; + String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId; + // Should be detected as registered bitstream + assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId)); + } + + @Test + public void stripRegisteredBitstreamPrefixWhenCalculatingPath() { + // Set paths and IDs + String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf"; + String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path; + // Paths should be equal, since the getRelativePath method should strip the registered -R prefix + String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId); + assertEquals(s3Path, relativeRegisteredPath); + } + + @Test + public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() { + String path = "01234567890123456789"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() { + String path = "0"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "0" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() { + String path = "01234"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator; + assertThat(computedPath, 
equalTo(expectedPath)); + } + + @Test + public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() { + String path = "012345"; + String computedPath = this.s3BitStoreService.getIntermediatePath(path); + String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator; + assertThat(computedPath, equalTo(expectedPath)); + } + + @Test + public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException { + StringBuilder path = new StringBuilder("01"); + String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + int slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("2"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("3"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("4"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + + path.append("56789"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator))); + } + + @Test + public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException { + StringBuilder path = new StringBuilder("01"); + String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + int slashes = computeSlashes(path.toString()); + assertThat(computedPath, 
Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("2"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("3"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("4"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + + path.append("56789"); + computedPath = this.s3BitStoreService.getIntermediatePath(path.toString()); + slashes = computeSlashes(path.toString()); + assertThat(computedPath, Matchers.endsWith(File.separator)); + assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes)); + } + + @Test + public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() { + String sInternalId = new StringBuilder("01") + .append(File.separator) + .append("22") + .append(File.separator) + .append("33") + .append(File.separator) + .append("4455") + .toString(); + String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId); + assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator))); + assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator))); + assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator))); + } + + private byte[] generateChecksum(String content) { + try { + MessageDigest m = 
MessageDigest.getInstance("MD5"); + m.update(content.getBytes()); + return m.digest(); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + } + + private AmazonS3 createAmazonS3Client() { + return AmazonS3ClientBuilder.standard() + .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials())) + .withEndpointConfiguration(new EndpointConfiguration("http://127.0.0.1:8001", DEFAULT_REGION.getName())) + .build(); + } + + private Item createItem() { + return ItemBuilder.createItem(context, collection) + .withTitle("Test item") + .build(); + } + + private Bitstream createBitstream(String content) { + try { + return BitstreamBuilder + .createBitstream(context, createItem(), toInputStream(content)) + .build(); + } catch (SQLException | AuthorizeException | IOException e) { + throw new RuntimeException(e); + } + } + + private Matcher bucketNamed(String name) { + return LambdaMatcher.matches(bucket -> bucket.getName().equals(name)); + } + + private InputStream toInputStream(String content) { + return IOUtils.toInputStream(content, UTF_8); + } + + private int computeSlashes(String internalId) { + int minimum = internalId.length(); + int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel; + int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel); + int slashes = slashesPerLevel + odd; + return Math.min(slashes, S3BitStoreService.directoryLevels); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/supervision/SupervisionOrderServiceIT.java b/dspace-api/src/test/java/org/dspace/supervision/SupervisionOrderServiceIT.java new file mode 100644 index 000000000000..60407823485b --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/supervision/SupervisionOrderServiceIT.java @@ -0,0 +1,385 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + 
*/ +package org.dspace.supervision; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasSize; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.builder.WorkspaceItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.WorkspaceItem; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.supervision.factory.SupervisionOrderServiceFactory; +import org.dspace.supervision.service.SupervisionOrderService; +import org.junit.Test; + +/** + * Unit tests for the {@link SupervisionOrderService} + * + * @author Mohamed Eskander (mohamed.eskander at 4science dot it) + */ +public class SupervisionOrderServiceIT extends AbstractIntegrationTestWithDatabase { + + protected SupervisionOrderService supervisionOrderService = + SupervisionOrderServiceFactory.getInstance().getSupervisionOrderService(); + + @Test + public void createSupervisionOrderTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, parentCommunity) + .withName("collection") + .withEntityType("Publication") + .build(); + + WorkspaceItem workspaceItem = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item") + .withIssueDate("2023-01-24") + .grantLicense() + .build(); + + Item item = workspaceItem.getItem(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + 
.withEmail("userA@example.org") + .build(); + + EPerson userB = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userB@example.org") + .build(); + + Group groupA = GroupBuilder.createGroup(context) + .withName("group A") + .addMember(userA) + .build(); + + Group groupB = GroupBuilder.createGroup(context) + .withName("group B") + .addMember(userB) + .build(); + + SupervisionOrder supervisionOrderOne = + supervisionOrderService.create(context, item, groupA); + + SupervisionOrder supervisionOrderTwo = + supervisionOrderService.create(context, item, groupB); + + context.restoreAuthSystemState(); + + assertThat(supervisionOrderOne, notNullValue()); + assertThat(supervisionOrderOne.getItem().getID(), is(item.getID())); + assertThat(supervisionOrderOne.getGroup().getID(), is(groupA.getID())); + + assertThat(supervisionOrderTwo, notNullValue()); + assertThat(supervisionOrderTwo.getItem().getID(), is(item.getID())); + assertThat(supervisionOrderTwo.getGroup().getID(), is(groupB.getID())); + + } + + @Test + public void findSupervisionOrderTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, parentCommunity) + .withName("collection") + .withEntityType("Publication") + .build(); + + WorkspaceItem workspaceItem = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item") + .withIssueDate("2023-01-24") + .grantLicense() + .build(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userA@example.org") + .build(); + + Group groupA = GroupBuilder.createGroup(context) + .withName("group A") + .addMember(userA) + .build(); + + SupervisionOrder supervisionOrderOne = + supervisionOrderService.create(context, workspaceItem.getItem(), groupA); + + context.restoreAuthSystemState(); + + 
SupervisionOrder supervisionOrder = + supervisionOrderService.find(context, supervisionOrderOne.getID()); + + assertThat(supervisionOrder, notNullValue()); + assertThat(supervisionOrder.getID(), is(supervisionOrderOne.getID())); + + assertThat(supervisionOrder.getGroup().getID(), + is(supervisionOrderOne.getGroup().getID())); + + assertThat(supervisionOrder.getItem().getID(), + is(supervisionOrderOne.getItem().getID())); + + } + + @Test + public void findAllSupervisionOrdersTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, parentCommunity) + .withName("collection") + .withEntityType("Publication") + .build(); + + WorkspaceItem workspaceItem = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item") + .withIssueDate("2023-01-24") + .grantLicense() + .build(); + + WorkspaceItem workspaceItemTwo = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item two") + .withIssueDate("2023-01-25") + .grantLicense() + .build(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userA@example.org") + .build(); + + EPerson userB = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userB@example.org") + .build(); + + Group groupA = GroupBuilder.createGroup(context) + .withName("group A") + .addMember(userA) + .build(); + + Group groupB = GroupBuilder.createGroup(context) + .withName("group B") + .addMember(userB) + .build(); + + supervisionOrderService.create(context, workspaceItem.getItem(), groupA); + supervisionOrderService.create(context, workspaceItem.getItem(), groupB); + supervisionOrderService.create(context, workspaceItemTwo.getItem(), groupA); + + context.restoreAuthSystemState(); + + 
assertThat(supervisionOrderService.findAll(context), hasSize(3)); + } + + @Test + public void findSupervisionOrderByItemTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, parentCommunity) + .withName("collection") + .withEntityType("Publication") + .build(); + + WorkspaceItem workspaceItem = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item") + .withIssueDate("2023-01-24") + .grantLicense() + .build(); + + WorkspaceItem workspaceItemTwo = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item two") + .withIssueDate("2023-01-25") + .grantLicense() + .build(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userA@example.org") + .build(); + + Group groupA = GroupBuilder.createGroup(context) + .withName("group A") + .addMember(userA) + .build(); + + Group groupB = GroupBuilder.createGroup(context) + .withName("group B") + .addMember(eperson) + .build(); + + supervisionOrderService.create(context, workspaceItem.getItem(), groupA); + supervisionOrderService.create(context, workspaceItem.getItem(), groupB); + supervisionOrderService.create(context, workspaceItemTwo.getItem(), groupA); + + context.restoreAuthSystemState(); + + assertThat(supervisionOrderService.findByItem(context, workspaceItem.getItem()), hasSize(2)); + assertThat(supervisionOrderService.findByItem(context, workspaceItemTwo.getItem()), hasSize(1)); + + } + + @Test + public void findSupervisionOrderByItemAndGroupTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, parentCommunity) + 
.withName("collection") + .withEntityType("Publication") + .build(); + + WorkspaceItem workspaceItem = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item") + .withIssueDate("2023-01-24") + .grantLicense() + .build(); + + Item item = workspaceItem.getItem(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userA@example.org") + .build(); + + Group groupA = GroupBuilder.createGroup(context) + .withName("group A") + .addMember(userA) + .build(); + + Group groupB = GroupBuilder.createGroup(context) + .withName("group B") + .addMember(eperson) + .build(); + + supervisionOrderService.create(context, item, groupA); + + context.restoreAuthSystemState(); + + SupervisionOrder supervisionOrderA = + supervisionOrderService.findByItemAndGroup(context, item, groupA); + + assertThat(supervisionOrderA, notNullValue()); + assertThat(supervisionOrderA.getItem().getID(), is(item.getID())); + assertThat(supervisionOrderA.getGroup().getID(), is(groupA.getID())); + + // no supervision order on item and groupB + assertThat(supervisionOrderService.findByItemAndGroup(context, item, groupB), nullValue()); + + } + + @Test + public void isSupervisorTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = + CommunityBuilder.createCommunity(context) + .withName("parent community") + .build(); + + Collection collection = + CollectionBuilder.createCollection(context, parentCommunity) + .withName("collection") + .withEntityType("Publication") + .build(); + + WorkspaceItem workspaceItem = + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("workspace item") + .withIssueDate("2023-01-24") + .grantLicense() + .build(); + + EPerson userA = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userA@example.org") + .build(); + + EPerson userB = + EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("userB@example.org") 
+ .build(); + + Group groupA = GroupBuilder.createGroup(context) + .withName("group A") + .addMember(userA) + .build(); + + GroupBuilder.createGroup(context) + .withName("group B") + .addMember(userB) + .build(); + + supervisionOrderService.create(context, workspaceItem.getItem(), groupA); + + context.restoreAuthSystemState(); + + assertThat(supervisionOrderService.isSupervisor( + context, userA, workspaceItem.getItem()), is(true)); + + // userB is not a supervisor on workspace Item + assertThat(supervisionOrderService.isSupervisor( + context, userB, workspaceItem.getItem()), is(false)); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/util/DSpaceConfigurationInitializer.java b/dspace-api/src/test/java/org/dspace/util/DSpaceConfigurationInitializer.java index 70d8f129903f..e2e0355f123a 100644 --- a/dspace-api/src/test/java/org/dspace/util/DSpaceConfigurationInitializer.java +++ b/dspace-api/src/test/java/org/dspace/util/DSpaceConfigurationInitializer.java @@ -9,8 +9,6 @@ import org.apache.commons.configuration2.Configuration; import org.apache.commons.configuration2.spring.ConfigurationPropertySource; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; import org.springframework.context.ApplicationContextInitializer; @@ -32,8 +30,6 @@ public class DSpaceConfigurationInitializer implements ApplicationContextInitializer { - private static final Logger log = LogManager.getLogger(); - @Override public void initialize(final ConfigurableApplicationContext applicationContext) { // Load DSpace Configuration service (requires kernel already initialized) diff --git a/dspace-api/src/test/java/org/dspace/util/DSpaceKernelInitializer.java b/dspace-api/src/test/java/org/dspace/util/DSpaceKernelInitializer.java index 93fd308185b7..a6f381bafbae 100644 --- a/dspace-api/src/test/java/org/dspace/util/DSpaceKernelInitializer.java +++ 
b/dspace-api/src/test/java/org/dspace/util/DSpaceKernelInitializer.java @@ -115,7 +115,7 @@ private String getDSpaceHome(ConfigurableEnvironment environment) { /** * Utility class that will destroy the DSpace Kernel on Spring shutdown. */ - private class DSpaceKernelDestroyer + private static class DSpaceKernelDestroyer implements ApplicationListener { private DSpaceKernel kernel; diff --git a/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java b/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java new file mode 100644 index 000000000000..17e21779d4fe --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.text.ParseException; +import java.util.Arrays; +import java.util.List; + +import org.apache.commons.lang.StringUtils; +import org.dspace.importer.external.service.DoiCheck; +import org.junit.Test; + +/** + * Test class for the DoiCheck + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class DoiCheckTest { + + @Test + public void checkDOIsTest() throws ParseException { + for (String doi : DOIsToTest()) { + assertTrue("The: " + doi + " is a doi!", DoiCheck.isDoi(doi)); + } + } + + @Test + public void checkWrongDOIsTest() throws ParseException { + for (String key : wrongDOIsToTest()) { + assertFalse("This : " + key + " isn't a doi!", DoiCheck.isDoi(key)); + } + } + + private List DOIsToTest() { + return Arrays.asList( + "10.1430/8105", + "10.1038/nphys1170", + "10.1002/0470841559.ch1", + "10.1594/PANGAEA.726855", + "10.1594/GFZ.GEOFON.gfz2009kciu", + "10.3866/PKU.WHXB201112303", + "10.11467/isss2003.7.1_11", + "10.3972/water973.0145.db" 
+ ); + } + + private List wrongDOIsToTest() { + return Arrays.asList( + StringUtils.EMPTY, + "123456789", + "nphys1170/10.1038", + "10.", "10", + "10.1038/" + ); + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/java/org/dspace/util/FakeConsoleServiceImpl.java b/dspace-api/src/test/java/org/dspace/util/FakeConsoleServiceImpl.java new file mode 100644 index 000000000000..f34d58410d0b --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/FakeConsoleServiceImpl.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +/** + * A test version of ConsoleService which supplies any password input that we + * want. + * + *

    This can return different passwords on even/odd calls, to test + * confirmation dialogs. See {@link setPassword1} and {@link setPassword2}. + * Use {@link setPassword} to set both identically. + * + * @author Mark H. Wood + */ +public class FakeConsoleServiceImpl + implements ConsoleService { + private String prompt; + private Object[] args; + private char[] password1; + private char[] password2; + private int passwordCalls = 0; + + @Override + public char[] readPassword(String prompt, Object... args) { + this.prompt = prompt; + this.args = args; + passwordCalls++; + if (passwordCalls % 2 != 0) { + return password1; + } else { + return password2; + } + } + + public String getPasswordPrompt() { + return prompt; + } + + public Object[] getArgs() { + return this.args; + } + + /** + * Set both passwords identically. + * @param password the password to be returned each time. + */ + public void setPassword(char[] password) { + setPassword1(password); + setPassword2(password); + } + + /** + * Set the password returned on odd calls to {@link readPassword}. + * @param password the password to be returned. + */ + public void setPassword1(char[] password) { + password1 = password; + } + + /** + * Set the password returned on even calls to {@link readPassword}, + * and reset the call counter. + * @param password the password to be returned. 
+ */ + public void setPassword2(char[] password) { + password2 = password; + passwordCalls = 0; + } +} diff --git a/dspace-api/src/test/java/org/dspace/util/RawJsonDeserializerTest.java b/dspace-api/src/test/java/org/dspace/util/RawJsonDeserializerTest.java new file mode 100644 index 000000000000..e1e6e246b992 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/RawJsonDeserializerTest.java @@ -0,0 +1,58 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.junit.Test; + +/** + * Unit tests for {@link RawJsonDeserializer}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class RawJsonDeserializerTest { + + private String json = "" + + "{" + + " \"attribute\": {" + + " \"firstField\":\"value\"," + + " \"secondField\": 1" + + " }" + + "}"; + + @Test + public void testDeserialization() throws JsonMappingException, JsonProcessingException { + + ObjectMapper mapper = new ObjectMapper(); + + DeserializationTestClass object = mapper.readValue(json, DeserializationTestClass.class); + assertThat(object, notNullValue()); + assertThat(object.getAttribute(), is("{\"firstField\":\"value\",\"secondField\":1}")); + + } + + private static class DeserializationTestClass { + + @JsonDeserialize(using = RawJsonDeserializer.class) + private String attribute; + + public String getAttribute() { + return attribute; + } + + } + +} diff --git a/dspace-api/src/test/java/org/dspace/util/RelationshipVersioningTestUtils.java b/dspace-api/src/test/java/org/dspace/util/RelationshipVersioningTestUtils.java new file mode 100644 index 000000000000..68f73734af95 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/RelationshipVersioningTestUtils.java @@ -0,0 +1,53 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +import org.dspace.content.Item; +import org.dspace.content.Relationship.LatestVersionStatus; +import org.dspace.content.RelationshipType; +import org.hamcrest.Matcher; + +/** + * Methods for testing relationships and their behavior with versioned items. 
+ */ +public class RelationshipVersioningTestUtils { + + private RelationshipVersioningTestUtils() {} + + public static Matcher isRel( + Item leftItem, RelationshipType relationshipType, Item rightItem, LatestVersionStatus latestVersionStatus, + int leftPlace, int rightPlace + ) { + return isRel(leftItem, relationshipType, rightItem, latestVersionStatus, null, null, leftPlace, rightPlace); + } + + public static Matcher isRel( + Item leftItem, RelationshipType relationshipType, Item rightItem, LatestVersionStatus latestVersionStatus, + String leftwardValue, String rightwardValue, int leftPlace, int rightPlace + ) { + return allOf( + hasProperty("leftItem", is(leftItem)), + // NOTE: this is a painful one... class RelationshipType does not implement the equals method, so we cannot + // rely on object equality and have to compare ids instead. It has to be in capital letters, + // because the getter has been implemented inconsistently (#id vs #setId() vs #getID()). + hasProperty("relationshipType", hasProperty("ID", is(relationshipType.getID()))), + hasProperty("rightItem", is(rightItem)), + hasProperty("leftPlace", is(leftPlace)), + hasProperty("rightPlace", is(rightPlace)), + hasProperty("leftwardValue", leftwardValue == null ? nullValue() : is(leftwardValue)), + hasProperty("rightwardValue", rightwardValue == null ? 
nullValue() : is(rightwardValue)), + hasProperty("latestVersionStatus", is(latestVersionStatus)) + ); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/util/SimpleMapConverterTest.java b/dspace-api/src/test/java/org/dspace/util/SimpleMapConverterTest.java new file mode 100644 index 000000000000..b380c4e7ba94 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/SimpleMapConverterTest.java @@ -0,0 +1,171 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertThrows; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.io.FileUtils; +import org.dspace.services.ConfigurationService; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * Unit tests for {@link SimpleMapConverter}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@RunWith(MockitoJUnitRunner.class) +public class SimpleMapConverterTest { + + @Rule + public TemporaryFolder folder = new TemporaryFolder(); + + @Mock + private ConfigurationService configurationService; + + private File dspaceDir; + + private File crosswalksDir; + + @Before + public void before() throws IOException { + dspaceDir = folder.getRoot(); + crosswalksDir = folder.newFolder("config", "crosswalks"); + } + + @Test + public void testPropertiesParsing() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", "key1=value1\nkey2=value2\nkey3=value3"); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("value1")); + assertThat(simpleMapConverter.getValue("key2"), is("value2")); + assertThat(simpleMapConverter.getValue("key3"), is("value3")); + assertThat(simpleMapConverter.getValue(""), is("")); + assertThat(simpleMapConverter.getValue(null), nullValue()); + + assertThat(simpleMapConverter.getValue("key4"), is("key4")); + + } + + @Test + public void testPropertiesParsingWithDefaultValue() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", "key1=value1\nkey2=value2\nkey3=value3"); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + simpleMapConverter.setDefaultValue("default"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("value1")); 
+ assertThat(simpleMapConverter.getValue("key2"), is("value2")); + assertThat(simpleMapConverter.getValue("key3"), is("value3")); + assertThat(simpleMapConverter.getValue(""), is("default")); + assertThat(simpleMapConverter.getValue(null), is("default")); + + assertThat(simpleMapConverter.getValue("key4"), is("default")); + + } + + @Test + public void testPropertiesParsingWithAnUnexistingFile() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, + () -> simpleMapConverter.init()); + + // Get path separator used for this platform (eg. / for Linux, \ for Windows) + String separator = File.separator; + + assertThat(exception.getMessage(), + is("An error occurs parsing " + dspaceDir.getAbsolutePath() + separator + "config" + separator + + "crosswalks" + separator + "test.properties")); + + Throwable cause = exception.getCause(); + assertThat(cause, notNullValue()); + assertThat(cause, instanceOf(FileNotFoundException.class)); + + } + + @Test + public void testPropertiesParsingWithCorruptedFile() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", "key1=value1\nkey2\nkey3=value3"); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("value1")); + assertThat(simpleMapConverter.getValue("key2"), is("key2")); + assertThat(simpleMapConverter.getValue("key3"), is("value3")); + + 
assertThat(simpleMapConverter.getValue("key4"), is("key4")); + + + } + + @Test + public void testPropertiesParsingWithEmptyFile() throws IOException { + + when(configurationService.getProperty("dspace.dir")).thenReturn(dspaceDir.getAbsolutePath()); + createFileInFolder(crosswalksDir, "test.properties", ""); + + SimpleMapConverter simpleMapConverter = new SimpleMapConverter(); + simpleMapConverter.setConfigurationService(configurationService); + simpleMapConverter.setConverterNameFile("test.properties"); + + simpleMapConverter.init(); + + assertThat(simpleMapConverter.getValue("key1"), is("key1")); + assertThat(simpleMapConverter.getValue("key2"), is("key2")); + + } + + private void createFileInFolder(File folder, String name, String content) throws IOException { + File file = new File(folder, name); + FileUtils.write(file, content, StandardCharsets.UTF_8); + } + +} diff --git a/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java new file mode 100644 index 000000000000..12055140a2f7 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertEquals; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Date; + +import org.junit.Test; + +/** + * Test {@link TimeHelpers}. + * @author Mark H. Wood + */ +public class TimeHelpersTest { + /** + * Test of toMidnightUTC method, of class TimeHelpers. 
+ */ + @Test + public void testToMidnightUTC() { + System.out.println("toMidnightUTC"); + Date from = Date.from(ZonedDateTime.of(1957, 01, 27, 04, 05, 06, 007, ZoneOffset.UTC).toInstant()); + Date expResult = Date.from(ZonedDateTime.of(1957, 01, 27, 00, 00, 00, 000, ZoneOffset.UTC).toInstant()); + Date result = TimeHelpers.toMidnightUTC(from); + assertEquals(expResult, result); + } +} diff --git a/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowServiceIT.java b/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowServiceIT.java new file mode 100644 index 000000000000..865abaca2152 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/xmlworkflow/XmlWorkflowServiceIT.java @@ -0,0 +1,222 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xmlworkflow; + +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.List; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.AbstractIntegrationTestWithDatabase; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.builder.ClaimedTaskBuilder; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.GroupBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.core.Constants; +import org.dspace.discovery.IndexingService; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import 
org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; +import org.dspace.xmlworkflow.service.XmlWorkflowService; +import org.dspace.xmlworkflow.state.Workflow; +import org.dspace.xmlworkflow.state.actions.processingaction.SelectReviewerAction; +import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; +import org.junit.After; +import org.junit.Test; +import org.springframework.mock.web.MockHttpServletRequest; + +/** + * IT for {@link XmlWorkflowServiceImpl} + * + * @author Maria Verdonck (Atmire) on 14/12/21 + */ +public class XmlWorkflowServiceIT extends AbstractIntegrationTestWithDatabase { + + protected XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory.getInstance().getXmlWorkflowService(); + protected IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName(IndexingService.class.getName(), + IndexingService.class); + protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); + protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + /** + * Cleans up the created workflow role groups after each test + * @throws SQLException + * @throws AuthorizeException + * @throws IOException + */ + @After + public void cleanup() throws SQLException, AuthorizeException, IOException { + Group reviewManagers = groupService.findByName(context, "ReviewManagers"); + if (reviewManagers != null) { + groupService.delete(context, reviewManagers); + } + } + + /** + * Test to verify that if a user submits an item into the workflow, then it gets rejected that the submitter gets + * write access back on the item + * + * @throws Exception + */ + @Test + public void 
workflowUserRejectsItemTheySubmitted_ItemShouldBeEditable() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection colWithWorkflow = CollectionBuilder.createCollection(context, community) + .withName("Collection WITH workflow") + .withWorkflowGroup(1, submitter) + .build(); + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(colWithWorkflow); + ClaimedTask taskToReject = ClaimedTaskBuilder.createClaimedTask(context, colWithWorkflow, submitter) + .withTitle("Test workflow item to reject").build(); + context.restoreAuthSystemState(); + + // Submitter person is both original submitter as well as reviewer, should have edit access of claimed task + assertTrue(this.containsRPForUser(taskToReject.getWorkflowItem().getItem(), submitter, Constants.WRITE)); + + // reject + MockHttpServletRequest httpRejectRequest = new MockHttpServletRequest(); + httpRejectRequest.setParameter("submit_reject", "submit_reject"); + httpRejectRequest.setParameter("reason", "test"); + executeWorkflowAction(httpRejectRequest, workflow, taskToReject); + + // Submitter person is both original submitter as well as reviewer, should have edit access of reject, i.e. 
+ // sent back/to submission task + assertTrue(this.containsRPForUser(taskToReject.getWorkflowItem().getItem(), submitter, Constants.WRITE)); + } + + /** + * Test to verify that if a user submits an item into the workflow, a reviewmanager can select a single reviewer + * eperson + */ + @Test + public void workflowUserSingleSelectedReviewer_ItemShouldBeEditable() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + EPerson reviewManager = + EPersonBuilder.createEPerson(context).withEmail("reviewmanager-test@example.org").build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection colWithWorkflow = CollectionBuilder.createCollection(context, community, "123456789/workflow-test-1") + .withName("Collection WITH workflow") + .withWorkflowGroup("reviewmanagers", reviewManager) + .build(); + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(colWithWorkflow); + ClaimedTask task = ClaimedTaskBuilder.createClaimedTask(context, colWithWorkflow, reviewManager) + .withTitle("Test workflow item to reject").build(); + // Set reviewer group property and add reviewer to group + SelectReviewerAction.resetGroup(); + configurationService.setProperty("action.selectrevieweraction.group", "Reviewers"); + Group reviewerGroup = GroupBuilder.createGroup(context).withName("Reviewers").build(); + EPerson reviewer = EPersonBuilder.createEPerson(context).withEmail("reviewer@example.org").build(); + groupService.addMember(context, reviewerGroup, reviewer); + context.restoreAuthSystemState(); + + // Review Manager should have access to workflow item + assertTrue(this.containsRPForUser(task.getWorkflowItem().getItem(), reviewManager, Constants.WRITE)); + + // select 1 reviewer + MockHttpServletRequest httpSelectReviewerRequest = new 
MockHttpServletRequest(); + httpSelectReviewerRequest.setParameter("submit_select_reviewer", "true"); + httpSelectReviewerRequest.setParameter("eperson", reviewer.getID().toString()); + executeWorkflowAction(httpSelectReviewerRequest, workflow, task); + + // Reviewer should have access to workflow item + assertTrue(this.containsRPForUser(task.getWorkflowItem().getItem(), reviewer, Constants.WRITE)); + } + + /** + * Test to verify that if a user submits an item into the workflow, a reviewmanager can select a multiple reviewer + * epersons + */ + @Test + public void workflowUserMultipleSelectedReviewer_ItemShouldBeEditable() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + EPerson reviewManager = + EPersonBuilder.createEPerson(context).withEmail("reviewmanager-test@example.org").build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection colWithWorkflow = CollectionBuilder.createCollection(context, community, "123456789/workflow-test-1") + .withName("Collection WITH workflow") + .withWorkflowGroup("reviewmanagers", reviewManager) + .build(); + Workflow workflow = XmlWorkflowServiceFactory.getInstance().getWorkflowFactory().getWorkflow(colWithWorkflow); + ClaimedTask task = ClaimedTaskBuilder.createClaimedTask(context, colWithWorkflow, reviewManager) + .withTitle("Test workflow item to reject").build(); + // Set reviewer group property and add reviewer to group + SelectReviewerAction.resetGroup(); + configurationService.setProperty("action.selectrevieweraction.group", "Reviewers"); + Group reviewerGroup = GroupBuilder.createGroup(context).withName("Reviewers").build(); + EPerson reviewer1 = EPersonBuilder.createEPerson(context).withEmail("reviewer1@example.org").build(); + EPerson reviewer2 = 
EPersonBuilder.createEPerson(context).withEmail("reviewer2@example.org").build(); + groupService.addMember(context, reviewerGroup, reviewer1); + groupService.addMember(context, reviewerGroup, reviewer2); + context.restoreAuthSystemState(); + + // Review Manager should have access to workflow item + assertTrue(this.containsRPForUser(task.getWorkflowItem().getItem(), reviewManager, Constants.WRITE)); + + // Select multiple reviewers + MockHttpServletRequest httpSelectMultipleReviewers = new MockHttpServletRequest(); + httpSelectMultipleReviewers.setParameter("submit_select_reviewer", "true"); + httpSelectMultipleReviewers.setParameter("eperson", reviewer1.getID().toString(), reviewer2.getID().toString()); + executeWorkflowAction(httpSelectMultipleReviewers, workflow, task); + + // Reviewers should have access to workflow item + assertTrue(this.containsRPForUser(task.getWorkflowItem().getItem(), reviewer1, Constants.WRITE)); + assertTrue(this.containsRPForUser(task.getWorkflowItem().getItem(), reviewer2, Constants.WRITE)); + } + + private boolean containsRPForUser(Item item, EPerson user, int action) throws SQLException { + List rps = authorizeService.getPolicies(context, item); + for (ResourcePolicy rp : rps) { + if (rp.getEPerson().getID().equals(user.getID()) && rp.getAction() == action) { + return true; + } + } + return false; + } + + private void executeWorkflowAction(HttpServletRequest httpServletRequest, Workflow workflow, ClaimedTask task) + throws Exception { + final EPerson previousUser = context.getCurrentUser(); + task = context.reloadEntity(task); + context.setCurrentUser(task.getOwner()); + xmlWorkflowService + .doState(context, task.getOwner(), httpServletRequest, task.getWorkflowItem().getID(), workflow, + workflow.getStep(task.getStepID()).getActionConfig(task.getActionID())); + context.commit(); + indexer.commit(); + context.setCurrentUser(previousUser); + } +} diff --git a/dspace-api/src/test/resources/log4j.properties 
b/dspace-api/src/test/resources/log4j.properties deleted file mode 100644 index 2797b7c65592..000000000000 --- a/dspace-api/src/test/resources/log4j.properties +++ /dev/null @@ -1,58 +0,0 @@ -# -# The contents of this file are subject to the license and copyright -# detailed in the LICENSE and NOTICE files at the root of the source -# tree and available online at -# -# http://www.dspace.org/license/ -# -########################################################################### -# -# log4j.properties -# -# -########################################################################### - -# This is a copy of the log4j configuration file for DSpace, to avoid -# getting errors when running tests. - -# Set root category priority to INFO and its only appender to A1. -log4j.rootCategory=INFO, A1 - -# A1 is set to be a ConsoleAppender. -log4j.appender.A1=org.apache.log4j.ConsoleAppender - -# A1 uses PatternLayout. -log4j.appender.A1.layout=org.apache.logging.log4j.PatternLayout -log4j.appender.A1.layout.ConversionPattern=%d %-5p %c @ %m%n - -########################################################################### -# Other settings -########################################################################### - -# Block passwords from being exposed in Axis logs. 
-# (DEBUG exposes passwords in Basic Auth) -log4j.logger.org.apache.axis.handlers.http.HTTPAuthHandler=INFO - -# Block services logging except on exceptions -log4j.logger.org.dspace.kernel=ERROR -log4j.logger.org.dspace.services=ERROR -log4j.logger.org.dspace.servicemanager=ERROR -log4j.logger.org.dspace.providers=ERROR -log4j.logger.org.dspace.utils=ERROR - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c{1}:%L - %m%n -# -# Root logger option -log4j.rootLogger=INFO, stdout - -# Hibernate logging options (INFO only shows startup messages) -log4j.logger.org.hibernate=INFO - -# For detailed Hibernate logging in Unit Tests, you can enable the following -# setting which logs all JDBC bind parameter runtime arguments. -# This will drastically increase the size of Unit Test logs though. -#log4j.logger.org.hibernate.SQL=DEBUG, A1 -#log4j.logger.org.hibernate.type=TRACE, A1 diff --git a/dspace-api/src/test/resources/log4j2-test.xml b/dspace-api/src/test/resources/log4j2-test.xml new file mode 100644 index 000000000000..e0cd82191278 --- /dev/null +++ b/dspace-api/src/test/resources/log4j2-test.xml @@ -0,0 +1,56 @@ + + + + + + DEBUG + + + INFO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core-person.xml b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core-person.xml new file mode 100644 index 000000000000..4d530630ba8a --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core-person.xml @@ -0,0 +1,3 @@ + + Person Test + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core.xml b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core.xml new file mode 100644 index 000000000000..a1afbb417ab4 --- 
/dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/dublin_core.xml @@ -0,0 +1,5 @@ + + A Tale of Two Cities + 1990 + J'aime les Printemps + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/metadata_dcterms.xml b/dspace-api/src/test/resources/org/dspace/app/itemimport/metadata_dcterms.xml new file mode 100644 index 000000000000..8d8e3a8d54a8 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/metadata_dcterms.xml @@ -0,0 +1,3 @@ + + A Tale of Two Cities + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/relationships b/dspace-api/src/test/resources/org/dspace/app/itemimport/relationships new file mode 100644 index 000000000000..e8ec1985ce4d --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/itemimport/relationships @@ -0,0 +1 @@ +relation.isAuthorOfPublication folderName:item_001 \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-bitstreams.zip b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-bitstreams.zip new file mode 100755 index 000000000000..35be57e897c8 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-bitstreams.zip differ diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-relationships.zip b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-relationships.zip new file mode 100755 index 000000000000..d41e7c6eb8b7 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/saf-relationships.zip differ diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf new file mode 100644 index 000000000000..5b3749cbff73 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf differ diff --git 
a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.csv b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.csv new file mode 100644 index 000000000000..07c22ff0bfb9 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.csv @@ -0,0 +1,4 @@ +row1,row2,row3,row4 +"data1,2","data 2,2","data3,2","data4,2" +"data1,3","data 2,3","data3,3","data4,3" +"data1,4","data2,4","data3,4","data4,4" diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.doc b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.doc similarity index 100% rename from dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.doc rename to dspace-api/src/test/resources/org/dspace/app/mediafilter/test.doc diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.docx b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.docx similarity index 100% rename from dspace-api/src/test/resources/org/dspace/app/mediafilter/wordtest.docx rename to dspace-api/src/test/resources/org/dspace/app/mediafilter/test.docx diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.html b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.html new file mode 100644 index 000000000000..7655f566cc35 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.html @@ -0,0 +1,53 @@ + + + + +A Text Extraction Test Document for DSpace + + + + +
    + +

    A Text Extraction Test Document

    + +

    for

    + +

    DSpace

    + +

    + +

    This is a text. For the next sixty seconds this software +will conduct a test of the DSpace text extraction facility. This is only a +text.

    + +

    This is a paragraph that followed the first that lived in +the document that Jack built.

    + +

    Lorem ipsum dolor sit amet. The quick brown fox jumped over +the lazy dog. Yow! Are we having fun yet?

    + +

    This has been a test of the DSpace text extraction system. +In the event of actual content you would care what is written here.

    + +
    + +
    + +
    + +
    + +

    Tip o’ the hat to the U.S. Emergency Broadcast System for the format that I have +irreverently borrowed.

    + +
    + + + + + + diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odp b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odp new file mode 100644 index 000000000000..4701884a8a62 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odp differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ods b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ods new file mode 100644 index 000000000000..94ad873c1a89 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ods differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odt b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odt new file mode 100644 index 000000000000..3c996a1f46c4 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.odt differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pdf b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pdf new file mode 100644 index 000000000000..5b3749cbff73 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pdf differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ppt b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ppt new file mode 100644 index 000000000000..bb3a3d6b41e2 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.ppt differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pptx b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pptx new file mode 100644 index 000000000000..2c27ad1630b9 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.pptx differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.rtf b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.rtf new file mode 
100644 index 000000000000..3b841917b27b --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.rtf @@ -0,0 +1,239 @@ +{\rtf1\adeflang1025\ansi\ansicpg1252\uc1\adeff46\deff0\stshfdbch45\stshfloch43\stshfhich43\stshfbi46\deflang1033\deflangfe1033\themelang1033\themelangfe0\themelangcs0{\fonttbl{\f34\fbidi \froman\fcharset0\fprq2{\*\panose 02040503050406030204}Cambria Math;}{\f43\fbidi \froman\fcharset0\fprq2 Liberation Serif{\*\falt Times New Roman};} +{\f44\fbidi \fswiss\fcharset0\fprq2 Liberation Sans{\*\falt Arial};}{\f45\fbidi \froman\fcharset0\fprq0{\*\panose 00000000000000000000}AR PL SungtiL GB;}{\f46\fbidi \froman\fcharset0\fprq0{\*\panose 00000000000000000000}Lohit Hindi;} +{\flomajor\f31500\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fdbmajor\f31501\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\fhimajor\f31502\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0302020204030204}Calibri Light;}{\fbimajor\f31503\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\flominor\f31504\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\fdbminor\f31505\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;} +{\fhiminor\f31506\fbidi \fswiss\fcharset0\fprq2{\*\panose 020f0502020204030204}Calibri;}{\fbiminor\f31507\fbidi \froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f1504\fbidi \froman\fcharset238\fprq2 Cambria Math CE;} +{\f1505\fbidi \froman\fcharset204\fprq2 Cambria Math Cyr;}{\f1507\fbidi \froman\fcharset161\fprq2 Cambria Math Greek;}{\f1508\fbidi \froman\fcharset162\fprq2 Cambria Math Tur;}{\f1511\fbidi \froman\fcharset186\fprq2 Cambria Math Baltic;} +{\f1512\fbidi \froman\fcharset163\fprq2 Cambria Math (Vietnamese);}{\flomajor\f31508\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flomajor\f31509\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} 
+{\flomajor\f31511\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\flomajor\f31512\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flomajor\f31513\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\flomajor\f31514\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\flomajor\f31515\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flomajor\f31516\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);} +{\fdbmajor\f31518\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fdbmajor\f31519\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbmajor\f31521\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} +{\fdbmajor\f31522\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fdbmajor\f31523\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbmajor\f31524\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\fdbmajor\f31525\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fdbmajor\f31526\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhimajor\f31528\fbidi \fswiss\fcharset238\fprq2 Calibri Light CE;} +{\fhimajor\f31529\fbidi \fswiss\fcharset204\fprq2 Calibri Light Cyr;}{\fhimajor\f31531\fbidi \fswiss\fcharset161\fprq2 Calibri Light Greek;}{\fhimajor\f31532\fbidi \fswiss\fcharset162\fprq2 Calibri Light Tur;} +{\fhimajor\f31533\fbidi \fswiss\fcharset177\fprq2 Calibri Light (Hebrew);}{\fhimajor\f31534\fbidi \fswiss\fcharset178\fprq2 Calibri Light (Arabic);}{\fhimajor\f31535\fbidi \fswiss\fcharset186\fprq2 Calibri Light Baltic;} +{\fhimajor\f31536\fbidi \fswiss\fcharset163\fprq2 Calibri Light (Vietnamese);}{\fbimajor\f31538\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\fbimajor\f31539\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;} +{\fbimajor\f31541\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fbimajor\f31542\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbimajor\f31543\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} 
+{\fbimajor\f31544\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fbimajor\f31545\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbimajor\f31546\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);} +{\flominor\f31548\fbidi \froman\fcharset238\fprq2 Times New Roman CE;}{\flominor\f31549\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\flominor\f31551\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} +{\flominor\f31552\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\flominor\f31553\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\flominor\f31554\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\flominor\f31555\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\flominor\f31556\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fdbminor\f31558\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} +{\fdbminor\f31559\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fdbminor\f31561\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\fdbminor\f31562\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;} +{\fdbminor\f31563\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fdbminor\f31564\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\fdbminor\f31565\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;} +{\fdbminor\f31566\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\fhiminor\f31568\fbidi \fswiss\fcharset238\fprq2 Calibri CE;}{\fhiminor\f31569\fbidi \fswiss\fcharset204\fprq2 Calibri Cyr;} +{\fhiminor\f31571\fbidi \fswiss\fcharset161\fprq2 Calibri Greek;}{\fhiminor\f31572\fbidi \fswiss\fcharset162\fprq2 Calibri Tur;}{\fhiminor\f31573\fbidi \fswiss\fcharset177\fprq2 Calibri (Hebrew);} +{\fhiminor\f31574\fbidi \fswiss\fcharset178\fprq2 Calibri (Arabic);}{\fhiminor\f31575\fbidi \fswiss\fcharset186\fprq2 Calibri Baltic;}{\fhiminor\f31576\fbidi \fswiss\fcharset163\fprq2 Calibri (Vietnamese);} +{\fbiminor\f31578\fbidi 
\froman\fcharset238\fprq2 Times New Roman CE;}{\fbiminor\f31579\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\fbiminor\f31581\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;} +{\fbiminor\f31582\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\fbiminor\f31583\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);}{\fbiminor\f31584\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);} +{\fbiminor\f31585\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\fbiminor\f31586\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}{\f1164\fbidi \froman\fcharset238\fprq2 Times New Roman CE;} +{\f1165\fbidi \froman\fcharset204\fprq2 Times New Roman Cyr;}{\f1167\fbidi \froman\fcharset161\fprq2 Times New Roman Greek;}{\f1168\fbidi \froman\fcharset162\fprq2 Times New Roman Tur;}{\f1169\fbidi \froman\fcharset177\fprq2 Times New Roman (Hebrew);} +{\f1170\fbidi \froman\fcharset178\fprq2 Times New Roman (Arabic);}{\f1171\fbidi \froman\fcharset186\fprq2 Times New Roman Baltic;}{\f1172\fbidi \froman\fcharset163\fprq2 Times New Roman (Vietnamese);}}{\colortbl;\red0\green0\blue0;\red0\green0\blue255; +\red0\green255\blue255;\red0\green255\blue0;\red255\green0\blue255;\red255\green0\blue0;\red255\green255\blue0;\red255\green255\blue255;\red0\green0\blue128;\red0\green128\blue128;\red0\green128\blue0;\red128\green0\blue128;\red128\green0\blue0; +\red128\green128\blue0;\red128\green128\blue128;\red192\green192\blue192;\red0\green0\blue0;\red0\green0\blue0;}{\*\defchp \fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\langfenp2052 }{\*\defpap +\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 }\noqfpromote {\stylesheet{\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \snext0 \sqformat \spriority0 Normal;}{\*\cs10 \additive \ssemihidden 
\sunhideused \spriority1 Default Paragraph Font;}{\* +\ts11\tsrowd\trftsWidthB3\trpaddl108\trpaddr108\trpaddfl3\trpaddft3\trpaddfb3\trpaddfr3\trcbpat1\trcfpat1\tblind0\tblindtype3\tsvertalt\tsbrdrt\tsbrdrl\tsbrdrb\tsbrdrr\tsbrdrdgl\tsbrdrdgr\tsbrdrh\tsbrdrv +\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \snext11 \ssemihidden \sunhideused +Normal Table;}{\*\cs15 \additive \sqformat \spriority0 Footnote Characters;}{\*\cs16 \additive \super \spriority0 Footnote Anchor;}{\*\cs17 \additive \super \spriority0 Endnote Anchor;}{\*\cs18 \additive \sqformat \spriority0 Endnote Characters;}{ +\s19\ql \li0\ri0\sb240\sa120\keepn\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs28\alang1081 \ltrch\fcs0 \fs28\lang1033\langfe2052\loch\f44\hich\af44\dbch\af45\cgrid\langnp1033\langfenp2052 +\sbasedon0 \snext20 \sqformat \spriority0 Heading;}{\s20\ql \li0\ri0\sa140\sl288\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext20 \spriority0 Body Text;}{\s21\ql \li0\ri0\sa140\sl288\slmult1\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 +\af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon20 \snext21 \spriority0 List;}{ +\s22\ql \li0\ri0\sb120\sa120\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \ai\af46\afs24\alang1081 \ltrch\fcs0 \i\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 +\sbasedon0 \snext22 \sqformat \spriority0 caption;}{\s23\ql 
\li0\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext23 \sqformat \spriority0 Index;}{\s24\ql \fi-339\li339\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin339\itap0 \rtlch\fcs1 +\af46\afs20\alang1081 \ltrch\fcs0 \fs20\lang1033\langfe2052\loch\f43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 \sbasedon0 \snext24 \spriority0 footnote text;}}{\*\rsidtbl \rsid6097384\rsid16590483\rsid16671749}{\mmathPr\mmathFont34\mbrkBin0 +\mbrkBinSub0\msmallFrac0\mdispDef1\mlMargin0\mrMargin0\mdefJc1\mwrapIndent1440\mintLim0\mnaryLim1}{\info{\title A Text Extraction Test Document for DSpace}{\author Mark Wood}{\operator Tim Donohue}{\creatim\yr2022\mo3\dy30\hr13\min54} +{\revtim\yr2022\mo3\dy30\hr13\min54}{\version2}{\edmins0}{\nofpages1}{\nofwords75}{\nofchars433}{\nofcharsws507}{\vern43}}{\*\xmlnstbl {\xmlns1 http://schemas.microsoft.com/office/word/2003/wordml}} +\paperw12240\paperh15840\margl1134\margr1134\margt1134\margb1134\gutter0\ltrsect +\deftab709\widowctrl\ftnbj\aenddoc\trackmoves0\trackformatting1\donotembedsysfont1\relyonvml0\donotembedlingdata0\grfdocevents0\validatexml1\showplaceholdtext0\ignoremixedcontent0\saveinvalidxml0\showxmlerrors1 +\noxlattoyen\expshrtn\noultrlspc\dntblnsbdb\nospaceforul\formshade\horzdoc\dgmargin\dghspace180\dgvspace180\dghorigin450\dgvorigin0\dghshow1\dgvshow1 +\jexpand\viewkind5\viewscale100\pgbrdrhead\pgbrdrfoot\splytwnine\ftnlytwnine\htmautsp\nolnhtadjtbl\useltbaln\alntblind\lytcalctblwd\lyttblrtgr\lnbrkrule\nobrkwrptbl\snaptogridincell\allowfieldendsel\wrppunct +\asianbrkrule\rsidroot6097384\newtblstyruls\nogrowautofit\usenormstyforlist\noindnmbrts\felnbrelev\nocxsptable\indrlsweleven\noafcnsttbl\afelev\utinl\hwelev\spltpgpar\notcvasp\notbrkcnstfrctbl\notvatxbx\krnprsnet\cachedcolbal \nouicompat \fet0 +{\*\wgrffmtfilter 
2450}\nofeaturethrottle1\ilfomacatclnup0{\*\ftnsep \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsep +\par }}{\*\ftnsepc \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsepc +\par }}{\*\aftnsep \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsep +\par }}{\*\aftnsepc \ltrpar \pard\plain \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 +\fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftnsepc +\par }}\ltrpar \sectd \ltrsect\linex0\headery0\footery0\endnhere\sectunlocked1\sectdefaultcl\sftnbj {\*\pnseclvl1\pnucrm\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl2\pnucltr\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl3 +\pndec\pnstart1\pnindent720\pnhang {\pntxta .}}{\*\pnseclvl4\pnlcltr\pnstart1\pnindent720\pnhang {\pntxta )}}{\*\pnseclvl5\pndec\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl6\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}} +{\*\pnseclvl7\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}{\*\pnseclvl8\pnlcltr\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta 
)}}{\*\pnseclvl9\pnlcrm\pnstart1\pnindent720\pnhang {\pntxtb (}{\pntxta )}}\pard\plain \ltrpar +\qc \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 \rtlch\fcs1 \af46\afs24\alang1081 \ltrch\fcs0 \fs24\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 +\fs30\insrsid16671749 \hich\af43\dbch\af45\loch\f43 A Text Extraction Test Document}{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid6097384 +\par }{\rtlch\fcs1 \af46\afs20 \ltrch\fcs0 \fs20\insrsid16671749 \hich\af43\dbch\af45\loch\f43 for}{\rtlch\fcs1 \af46\afs20 \ltrch\fcs0 \fs20\insrsid6097384 +\par }{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid16671749 \hich\af43\dbch\af45\loch\f43 DSpace}{\rtlch\fcs1 \af46\afs30 \ltrch\fcs0 \fs30\insrsid6097384 +\par +\par }\pard \ltrpar\ql \li0\ri0\widctlpar\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin0\itap0 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 +This is a text. For the next sixty seconds this software will conduct a test of the DSpace text extraction facility. This is only a text.}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par +\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 This is a paragraph that followed the first that lived in the \hich\af43\dbch\af45\loch\f43 document that Jack built.}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par +\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 Lorem ipsum dolor sit amet. The quick brown fox jumped over the lazy dog. Yow! Are we having fun yet?}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par +\par }{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \hich\af43\dbch\af45\loch\f43 This has been a test of the DSpace text extraction system. 
In the event of actual content you would care what is written he\hich\af43\dbch\af45\loch\f43 re.}{\rtlch\fcs1 +\af46 \ltrch\fcs0 \cs16\super\insrsid16671749 \chftn {\footnote \ltrpar \pard\plain \ltrpar\s24\ql \fi-339\li339\ri0\widctlpar\noline\wrapdefault\aspalpha\aspnum\faauto\adjustright\rin0\lin339\itap0 \rtlch\fcs1 \af46\afs20\alang1081 \ltrch\fcs0 +\fs20\lang1033\langfe2052\loch\af43\hich\af43\dbch\af45\cgrid\langnp1033\langfenp2052 {\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid16671749 \chftn \tab \hich\af43\dbch\af45\loch\f43 Tip o\hich\f43 \rquote \loch\f43 + the hat to the U.S. Emergency Broadcast System for the format that I have irreverently borrowed.}}}{\rtlch\fcs1 \af46 \ltrch\fcs0 \insrsid6097384 +\par }{\*\themedata 504b030414000600080000002100e9de0fbfff0000001c020000130000005b436f6e74656e745f54797065735d2e786d6cac91cb4ec3301045f748fc83e52d4a +9cb2400825e982c78ec7a27cc0c8992416c9d8b2a755fbf74cd25442a820166c2cd933f79e3be372bd1f07b5c3989ca74aaff2422b24eb1b475da5df374fd9ad +5689811a183c61a50f98f4babebc2837878049899a52a57be670674cb23d8e90721f90a4d2fa3802cb35762680fd800ecd7551dc18eb899138e3c943d7e503b6 +b01d583deee5f99824e290b4ba3f364eac4a430883b3c092d4eca8f946c916422ecab927f52ea42b89a1cd59c254f919b0e85e6535d135a8de20f20b8c12c3b0 +0c895fcf6720192de6bf3b9e89ecdbd6596cbcdd8eb28e7c365ecc4ec1ff1460f53fe813d3cc7f5b7f020000ffff0300504b030414000600080000002100a5d6 +a7e7c0000000360100000b0000005f72656c732f2e72656c73848fcf6ac3300c87ef85bd83d17d51d2c31825762fa590432fa37d00e1287f68221bdb1bebdb4f +c7060abb0884a4eff7a93dfeae8bf9e194e720169aaa06c3e2433fcb68e1763dbf7f82c985a4a725085b787086a37bdbb55fbc50d1a33ccd311ba548b6309512 +0f88d94fbc52ae4264d1c910d24a45db3462247fa791715fd71f989e19e0364cd3f51652d73760ae8fa8c9ffb3c330cc9e4fc17faf2ce545046e37944c69e462 +a1a82fe353bd90a865aad41ed0b5b8f9d6fd010000ffff0300504b0304140006000800000021006b799616830000008a0000001c0000007468656d652f746865 
+6d652f7468656d654d616e616765722e786d6c0ccc4d0ac3201040e17da17790d93763bb284562b2cbaebbf600439c1a41c7a0d29fdbd7e5e38337cedf14d59b +4b0d592c9c070d8a65cd2e88b7f07c2ca71ba8da481cc52c6ce1c715e6e97818c9b48d13df49c873517d23d59085adb5dd20d6b52bd521ef2cdd5eb9246a3d8b +4757e8d3f729e245eb2b260a0238fd010000ffff0300504b030414000600080000002100b6f4679893070000c9200000160000007468656d652f7468656d652f +7468656d65312e786d6cec59cd8b1bc915bf07f23f347d97f5d5ad8fc1f2a24fcfda33b6b164873dd648a5eef2547789aad28cc56208de532e81c026e49085bd +ed21842cecc22eb9e48f31d8249b3f22afaa5bdd5552c99e191c3061463074977eefd5afde7bf5de53d5ddcf5e26d4bbc05c1096f6fcfa9d9aefe174ce16248d +7afeb3d9a4d2f13d2151ba4094a5b8e76fb0f03fbbf7eb5fdd454732c609f6403e1547a8e7c752ae8eaa5531876124eeb0154ee1bb25e30992f0caa3ea82a34b +d09bd06aa3566b55134452df4b51026a1f2f97648ebd9952e9dfdb2a1f53784da5500373caa74a35b6243476715e5708b11143cabd0b447b3eccb3609733fc52 +fa1e4542c2173dbfa6fffceabdbb5574940b517940d6909be8bf5c2e17589c37f49c3c3a2b260d823068f50bfd1a40e53e6edc1eb7c6ad429f06a0f91c569a71 +b175b61bc320c71aa0ecd1a17bd41e35eb16ded0dfdce3dc0fd5c7c26b50a63fd8c34f2643b0a285d7a00c1feee1c3417730b2f56b50866fede1dbb5fe28685b +fa3528a6243ddf43d7c25673b85d6d0159327aec8477c360d26ee4ca4b144443115d6a8a254be5a1584bd00bc6270050408a24493db959e1259a43140f112567 +9c7827248a21f056286502866b8ddaa4d684ffea13e827ed5174849121ad780113b137a4f87862cec94af6fc07a0d537206f7ffef9cdeb1fdfbcfee9cd575fbd +79fdf77c6eadca923b466964cafdf2dd1ffef3cd6fbd7ffff0ed2f5fff319b7a172f4cfcbbbffdeedd3ffef93ef5b0e2d2146ffff4fdbb1fbf7ffbe7dfffebaf +5f3bb4f7393a33e1339260e13dc297de5396c0021dfcf119bf9ec42c46c494e8a791402952b338f48f656ca11f6d10450edc00db767cce21d5b880f7d72f2cc2 +d398af2571687c182716f094313a60dc6985876a2ec3ccb3751ab927e76b13f714a10bd7dc43945a5e1eaf579063894be530c616cd2714a5124538c5d253dfb1 +738c1dabfb8210cbaea764ce99604be97d41bc01224e93ccc899154da5d03149c02f1b1741f0b7659bd3e7de8051d7aa47f8c246c2de40d4417e86a965c6fb68 
+2d51e252394309350d7e8264ec2239ddf0b9891b0b099e8e3065de78818570c93ce6b05ec3e90f21cdb8dd7e4a37898de4929cbb749e20c64ce4889d0f6394ac +5cd829496313fbb938871045de13265df05366ef10f50e7e40e941773f27d872f787b3c133c8b026a53240d4376beef0e57dccacf89d6ee8126157aae9f3c44a +b17d4e9cd131584756689f604cd1255a60ec3dfbdcc160c05696cd4bd20f62c82ac7d815580f901dabea3dc5027a25d5dcece7c91322ac909de2881de073bad9 +493c1b9426881fd2fc08bc6eda7c0ca52e7105c0633a3f37818f08f480102f4ea33c16a0c308ee835a9fc4c82a60ea5db8e375c32dff5d658fc1be7c61d1b8c2 +be04197c6d1948eca6cc7b6d3343d49aa00c9819822ec3956e41c4727f29a28aab165b3be596f6a62ddd00dd91d5f42424fd6007b4d3fb84ffbbde073a8cb77f +f9c6b10f3e4ebfe3566c25ab6b763a8792c9f14e7f7308b7dbd50c195f904fbfa919a175fa04431dd9cf58b73dcd6d4fe3ffdff73487f6f36d2773a8dfb8ed64 +7ce8306e3b99fc70e5e3743265f3027d8d3af0c80e7af4b14f72f0d46749289dca0dc527421ffc08f83db398c0a092d3279eb838055cc5f0a8ca1c4c60e1228e +b48cc799fc0d91f134462b381daafb4a492472d591f0564cc0a1911e76ea5678ba4e4ed9223becacd7d5c16656590592e5782d2cc6e1a04a66e856bb3cc02bd4 +6bb6913e68dd1250b2d721614c6693683a48b4b783ca48fa58178ce620a157f65158741d2c3a4afdd6557b2c805ae115f8c1edc1cff49e1f06200242701e07cd +f942f92973f5d6bbda991fd3d3878c69450034d8db08283ddd555c0f2e4fad2e0bb52b78da2261849b4d425b46377822869fc17974aad1abd0b8aeafbba54b2d +7aca147a3e08ad9246bbf33e1637f535c8ede6069a9a9982a6de65cf6f35430899395af5fc251c1ac363b282d811ea3717a211dcbccc25cf36fc4d32cb8a0b39 +4222ce0cae934e960d122231f728497abe5a7ee1069aea1ca2b9d51b90103e59725d482b9f1a3970baed64bc5ce2b934dd6e8c284b67af90e1b35ce1fc568bdf +1cac24d91adc3d8d1797de195df3a708422c6cd795011744c0dd413db3e682c0655891c8caf8db294c79da356fa3740c65e388ae62945714339967709dca0b3a +faadb081f196af190c6a98242f8467912ab0a651ad6a5a548d8cc3c1aafb6121653923699635d3ca2aaa6abab39835c3b60cecd8f26645de60b53531e434b3c2 +67a97b37e576b7b96ea74f28aa0418bcb09fa3ea5ea12018d4cac92c6a8af17e1a56393b1fb56bc776811fa07695226164fdd656ed8edd8a1ae19c0e066f54f9 
+416e376a6168b9ed2bb5a5f5adb979b1cdce5e40f2184197bba6526857c2c92e47d0104d754f92a50dd8222f65be35e0c95b73d2f3bfac85fd60d80887955a27 +1c57826650ab74c27eb3d20fc3667d1cd66ba341e31514161927f530bbb19fc00506dde4f7f67a7cefee3ed9ded1dc99b3a4caf4dd7c5513d777f7f5c6e1bb7b +8f40d2f9b2d598749bdd41abd26df627956034e854bac3d6a0326a0ddba3c9681876ba9357be77a1c141bf390c5ae34ea5551f0e2b41aba6e877ba9576d068f4 +8376bf330efaaff23606569ea58fdc16605ecdebde7f010000ffff0300504b0304140006000800000021000dd1909fb60000001b010000270000007468656d65 +2f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73848f4d0ac2301484f78277086f6fd3ba109126dd88d0add40384e4350d36 +3f2451eced0dae2c082e8761be9969bb979dc9136332de3168aa1a083ae995719ac16db8ec8e4052164e89d93b64b060828e6f37ed1567914b284d262452282e +3198720e274a939cd08a54f980ae38a38f56e422a3a641c8bbd048f7757da0f19b017cc524bd62107bd5001996509affb3fd381a89672f1f165dfe514173d985 +0528a2c6cce0239baa4c04ca5bbabac4df000000ffff0300504b01022d0014000600080000002100e9de0fbfff0000001c020000130000000000000000000000 +0000000000005b436f6e74656e745f54797065735d2e786d6c504b01022d0014000600080000002100a5d6a7e7c0000000360100000b00000000000000000000 +000000300100005f72656c732f2e72656c73504b01022d00140006000800000021006b799616830000008a0000001c0000000000000000000000000019020000 +7468656d652f7468656d652f7468656d654d616e616765722e786d6c504b01022d0014000600080000002100b6f4679893070000c92000001600000000000000 +000000000000d60200007468656d652f7468656d652f7468656d65312e786d6c504b01022d00140006000800000021000dd1909fb60000001b01000027000000 +000000000000000000009d0a00007468656d652f7468656d652f5f72656c732f7468656d654d616e616765722e786d6c2e72656c73504b050600000000050005005d010000980b00000000} +{\*\colorschememapping 3c3f786d6c2076657273696f6e3d22312e302220656e636f64696e673d225554462d3822207374616e64616c6f6e653d22796573223f3e0d0a3c613a636c724d +617020786d6c6e733a613d22687474703a2f2f736368656d61732e6f70656e786d6c666f726d6174732e6f72672f64726177696e676d6c2f323030362f6d6169 
+6e22206267313d226c743122207478313d22646b3122206267323d226c743222207478323d22646b322220616363656e74313d22616363656e74312220616363 +656e74323d22616363656e74322220616363656e74333d22616363656e74332220616363656e74343d22616363656e74342220616363656e74353d22616363656e74352220616363656e74363d22616363656e74362220686c696e6b3d22686c696e6b2220666f6c486c696e6b3d22666f6c486c696e6b222f3e} +{\*\latentstyles\lsdstimax376\lsdlockeddef0\lsdsemihiddendef0\lsdunhideuseddef0\lsdqformatdef0\lsdprioritydef99{\lsdlockedexcept \lsdqformat1 \lsdpriority0 \lsdlocked0 Normal;\lsdqformat1 \lsdpriority9 \lsdlocked0 heading 1; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 2;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 3;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 4; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 5;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 6;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 7; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 8;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority9 \lsdlocked0 heading 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 1; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 5; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index 9; +\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 1;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 2;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 3; +\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 
4;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 5;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 6; +\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 7;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 8;\lsdsemihidden1 \lsdunhideused1 \lsdpriority39 \lsdlocked0 toc 9;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Indent; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 header;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footer; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 index heading;\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority35 \lsdlocked0 caption;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of figures; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 envelope return;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 footnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation reference; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 line number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 page number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote reference;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 endnote text; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 table of authorities;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 macro;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 toa heading;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 
List Bullet 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Bullet 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Number 5;\lsdqformat1 \lsdpriority10 \lsdlocked0 Title;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Closing; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Signature;\lsdsemihidden1 \lsdunhideused1 \lsdpriority1 \lsdlocked0 Default Paragraph Font;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 4; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 List Continue 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Message Header;\lsdqformat1 \lsdpriority11 \lsdlocked0 Subtitle;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Salutation; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Date;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text First Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Note Heading; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Body Text Indent 3; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Block Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 FollowedHyperlink;\lsdqformat1 \lsdpriority22 \lsdlocked0 Strong; +\lsdqformat1 \lsdpriority20 \lsdlocked0 Emphasis;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Document Map;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Plain 
Text;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 E-mail Signature; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Top of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Bottom of Form;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal (Web);\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Acronym; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Address;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Cite;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Code;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Definition; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Keyboard;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Preformatted;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Sample;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Typewriter; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 HTML Variable;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Normal Table;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 annotation subject;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 No List; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Outline List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 1; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Simple 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Classic 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Colorful 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 3; +\lsdsemihidden1 
\lsdunhideused1 \lsdlocked0 Table Columns 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Columns 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 6; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Grid 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 4;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 5;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 6; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 7;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table List 8;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table 3D effects 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Contemporary;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Elegant;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Professional; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Subtle 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Subtle 2;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 1;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 2; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Web 3;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Balloon Text;\lsdpriority39 \lsdlocked0 Table Grid;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Table Theme;\lsdsemihidden1 \lsdlocked0 Placeholder Text; +\lsdqformat1 \lsdpriority1 \lsdlocked0 No Spacing;\lsdpriority60 \lsdlocked0 Light Shading;\lsdpriority61 
\lsdlocked0 Light List;\lsdpriority62 \lsdlocked0 Light Grid;\lsdpriority63 \lsdlocked0 Medium Shading 1;\lsdpriority64 \lsdlocked0 Medium Shading 2; +\lsdpriority65 \lsdlocked0 Medium List 1;\lsdpriority66 \lsdlocked0 Medium List 2;\lsdpriority67 \lsdlocked0 Medium Grid 1;\lsdpriority68 \lsdlocked0 Medium Grid 2;\lsdpriority69 \lsdlocked0 Medium Grid 3;\lsdpriority70 \lsdlocked0 Dark List; +\lsdpriority71 \lsdlocked0 Colorful Shading;\lsdpriority72 \lsdlocked0 Colorful List;\lsdpriority73 \lsdlocked0 Colorful Grid;\lsdpriority60 \lsdlocked0 Light Shading Accent 1;\lsdpriority61 \lsdlocked0 Light List Accent 1; +\lsdpriority62 \lsdlocked0 Light Grid Accent 1;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 1;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 1;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 1;\lsdsemihidden1 \lsdlocked0 Revision; +\lsdqformat1 \lsdpriority34 \lsdlocked0 List Paragraph;\lsdqformat1 \lsdpriority29 \lsdlocked0 Quote;\lsdqformat1 \lsdpriority30 \lsdlocked0 Intense Quote;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 1;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 1; +\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 1;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 1;\lsdpriority70 \lsdlocked0 Dark List Accent 1;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 1;\lsdpriority72 \lsdlocked0 Colorful List Accent 1; +\lsdpriority73 \lsdlocked0 Colorful Grid Accent 1;\lsdpriority60 \lsdlocked0 Light Shading Accent 2;\lsdpriority61 \lsdlocked0 Light List Accent 2;\lsdpriority62 \lsdlocked0 Light Grid Accent 2;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 2; +\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 2;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 2;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 2;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 2;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 2; +\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 2;\lsdpriority70 \lsdlocked0 Dark List Accent 2;\lsdpriority71 \lsdlocked0 
Colorful Shading Accent 2;\lsdpriority72 \lsdlocked0 Colorful List Accent 2;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 2; +\lsdpriority60 \lsdlocked0 Light Shading Accent 3;\lsdpriority61 \lsdlocked0 Light List Accent 3;\lsdpriority62 \lsdlocked0 Light Grid Accent 3;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 3;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 3; +\lsdpriority65 \lsdlocked0 Medium List 1 Accent 3;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 3;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 3;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 3;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 3; +\lsdpriority70 \lsdlocked0 Dark List Accent 3;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 3;\lsdpriority72 \lsdlocked0 Colorful List Accent 3;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 3;\lsdpriority60 \lsdlocked0 Light Shading Accent 4; +\lsdpriority61 \lsdlocked0 Light List Accent 4;\lsdpriority62 \lsdlocked0 Light Grid Accent 4;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 4;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 4;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 4; +\lsdpriority66 \lsdlocked0 Medium List 2 Accent 4;\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 4;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 4;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 4;\lsdpriority70 \lsdlocked0 Dark List Accent 4; +\lsdpriority71 \lsdlocked0 Colorful Shading Accent 4;\lsdpriority72 \lsdlocked0 Colorful List Accent 4;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 4;\lsdpriority60 \lsdlocked0 Light Shading Accent 5;\lsdpriority61 \lsdlocked0 Light List Accent 5; +\lsdpriority62 \lsdlocked0 Light Grid Accent 5;\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 5;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 5;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 5;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 5; +\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 5;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 
5;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 5;\lsdpriority70 \lsdlocked0 Dark List Accent 5;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 5; +\lsdpriority72 \lsdlocked0 Colorful List Accent 5;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 5;\lsdpriority60 \lsdlocked0 Light Shading Accent 6;\lsdpriority61 \lsdlocked0 Light List Accent 6;\lsdpriority62 \lsdlocked0 Light Grid Accent 6; +\lsdpriority63 \lsdlocked0 Medium Shading 1 Accent 6;\lsdpriority64 \lsdlocked0 Medium Shading 2 Accent 6;\lsdpriority65 \lsdlocked0 Medium List 1 Accent 6;\lsdpriority66 \lsdlocked0 Medium List 2 Accent 6; +\lsdpriority67 \lsdlocked0 Medium Grid 1 Accent 6;\lsdpriority68 \lsdlocked0 Medium Grid 2 Accent 6;\lsdpriority69 \lsdlocked0 Medium Grid 3 Accent 6;\lsdpriority70 \lsdlocked0 Dark List Accent 6;\lsdpriority71 \lsdlocked0 Colorful Shading Accent 6; +\lsdpriority72 \lsdlocked0 Colorful List Accent 6;\lsdpriority73 \lsdlocked0 Colorful Grid Accent 6;\lsdqformat1 \lsdpriority19 \lsdlocked0 Subtle Emphasis;\lsdqformat1 \lsdpriority21 \lsdlocked0 Intense Emphasis; +\lsdqformat1 \lsdpriority31 \lsdlocked0 Subtle Reference;\lsdqformat1 \lsdpriority32 \lsdlocked0 Intense Reference;\lsdqformat1 \lsdpriority33 \lsdlocked0 Book Title;\lsdsemihidden1 \lsdunhideused1 \lsdpriority37 \lsdlocked0 Bibliography; +\lsdsemihidden1 \lsdunhideused1 \lsdqformat1 \lsdpriority39 \lsdlocked0 TOC Heading;\lsdpriority41 \lsdlocked0 Plain Table 1;\lsdpriority42 \lsdlocked0 Plain Table 2;\lsdpriority43 \lsdlocked0 Plain Table 3;\lsdpriority44 \lsdlocked0 Plain Table 4; +\lsdpriority45 \lsdlocked0 Plain Table 5;\lsdpriority40 \lsdlocked0 Grid Table Light;\lsdpriority46 \lsdlocked0 Grid Table 1 Light;\lsdpriority47 \lsdlocked0 Grid Table 2;\lsdpriority48 \lsdlocked0 Grid Table 3;\lsdpriority49 \lsdlocked0 Grid Table 4; +\lsdpriority50 \lsdlocked0 Grid Table 5 Dark;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful;\lsdpriority46 \lsdlocked0 Grid Table 1 
Light Accent 1;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 1; +\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 1;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 1;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 1; +\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 1;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 2;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 2; +\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 2;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 2; +\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 3;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 3;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 3;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 3; +\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 3;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 4; +\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 4;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 4;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 4;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 4; +\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 4;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 5; +\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 5;\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 5;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 5; +\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 5;\lsdpriority46 \lsdlocked0 Grid Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 Grid Table 2 Accent 6;\lsdpriority48 \lsdlocked0 Grid Table 3 Accent 6; 
+\lsdpriority49 \lsdlocked0 Grid Table 4 Accent 6;\lsdpriority50 \lsdlocked0 Grid Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 Grid Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 Grid Table 7 Colorful Accent 6; +\lsdpriority46 \lsdlocked0 List Table 1 Light;\lsdpriority47 \lsdlocked0 List Table 2;\lsdpriority48 \lsdlocked0 List Table 3;\lsdpriority49 \lsdlocked0 List Table 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark; +\lsdpriority51 \lsdlocked0 List Table 6 Colorful;\lsdpriority52 \lsdlocked0 List Table 7 Colorful;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 1;\lsdpriority47 \lsdlocked0 List Table 2 Accent 1;\lsdpriority48 \lsdlocked0 List Table 3 Accent 1; +\lsdpriority49 \lsdlocked0 List Table 4 Accent 1;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 1;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 1;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 1; +\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 2;\lsdpriority47 \lsdlocked0 List Table 2 Accent 2;\lsdpriority48 \lsdlocked0 List Table 3 Accent 2;\lsdpriority49 \lsdlocked0 List Table 4 Accent 2; +\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 2;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 2;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 2;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 3; +\lsdpriority47 \lsdlocked0 List Table 2 Accent 3;\lsdpriority48 \lsdlocked0 List Table 3 Accent 3;\lsdpriority49 \lsdlocked0 List Table 4 Accent 3;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 3; +\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 3;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 3;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 4;\lsdpriority47 \lsdlocked0 List Table 2 Accent 4; +\lsdpriority48 \lsdlocked0 List Table 3 Accent 4;\lsdpriority49 \lsdlocked0 List Table 4 Accent 4;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 4;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 4; +\lsdpriority52 
\lsdlocked0 List Table 7 Colorful Accent 4;\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 5;\lsdpriority47 \lsdlocked0 List Table 2 Accent 5;\lsdpriority48 \lsdlocked0 List Table 3 Accent 5; +\lsdpriority49 \lsdlocked0 List Table 4 Accent 5;\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 5;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 5;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 5; +\lsdpriority46 \lsdlocked0 List Table 1 Light Accent 6;\lsdpriority47 \lsdlocked0 List Table 2 Accent 6;\lsdpriority48 \lsdlocked0 List Table 3 Accent 6;\lsdpriority49 \lsdlocked0 List Table 4 Accent 6; +\lsdpriority50 \lsdlocked0 List Table 5 Dark Accent 6;\lsdpriority51 \lsdlocked0 List Table 6 Colorful Accent 6;\lsdpriority52 \lsdlocked0 List Table 7 Colorful Accent 6;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Mention; +\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Hyperlink;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Hashtag;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Unresolved Mention;\lsdsemihidden1 \lsdunhideused1 \lsdlocked0 Smart Link;}}{\*\datastore 01050000 +02000000180000004d73786d6c322e534158584d4c5265616465722e362e3000000000000000000000060000 +d0cf11e0a1b11ae1000000000000000000000000000000003e000300feff090006000000000000000000000001000000010000000000000000100000feffffff00000000feffffff0000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff 
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +fffffffffffffffffdfffffffeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +ffffffffffffffffffffffffffffffff52006f006f007400200045006e00740072007900000000000000000000000000000000000000000000000000000000000000000000000000000000000000000016000500ffffffffffffffffffffffff0c6ad98892f1d411a65f0040963251e5000000000000000000000000d0af +77916744d801feffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff00000000000000000000000000000000000000000000000000000000 
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000 +000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffff000000000000000000000000000000000000000000000000 +0000000000000000000000000000000000000000000000000105000000000000}} \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.txt b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.txt new file mode 100644 index 000000000000..edd9160b1d4b --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.txt @@ -0,0 +1,13 @@ +A Text Extraction Test Document +for +DSpace + +This is a text. For the next sixty seconds this software will conduct a test of the DSpace text extraction facility. This is only a text. + +This is a paragraph that followed the first that lived in the document that Jack built. + +Lorem ipsum dolor sit amet. The quick brown fox jumped over the lazy dog. Yow! Are we having fun yet? + +This has been a test of the DSpace text extraction system. In the event of actual content you would care what is written here. + +Tip o’ the hat to the U.S. Emergency Broadcast System for the format that I have irreverently borrowed. 
\ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xls b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xls new file mode 100644 index 000000000000..1ebc20bc3810 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xls differ diff --git a/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xlsx b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xlsx new file mode 100644 index 000000000000..47e0f7387f6a Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/mediafilter/test.xlsx differ diff --git a/dspace-api/src/test/resources/org/dspace/app/openaire-events/empty-events-list.json b/dspace-api/src/test/resources/org/dspace/app/openaire-events/empty-events-list.json new file mode 100644 index 000000000000..0637a088a01e --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/openaire-events/empty-events-list.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/dspace/modules/rest/src/main/webapp/.gitignore b/dspace-api/src/test/resources/org/dspace/app/openaire-events/empty-file.json similarity index 100% rename from dspace/modules/rest/src/main/webapp/.gitignore rename to dspace-api/src/test/resources/org/dspace/app/openaire-events/empty-file.json diff --git a/dspace-api/src/test/resources/org/dspace/app/openaire-events/events.json b/dspace-api/src/test/resources/org/dspace/app/openaire-events/events.json new file mode 100644 index 000000000000..9bb8daae36c7 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/openaire-events/events.json @@ -0,0 +1,62 @@ +[ + + { + "originalId": "oai:www.openstarts.units.it:123456789/99998", + "title": "Egypt, crossroad of translations and literary interweavings", + "topic": "ENRICH/MORE/PROJECT", + "trust": 1.0, + "message": { + "projects[0].acronym": "PAThs", + "projects[0].code": "687567", + "projects[0].funder": "EC", + "projects[0].fundingProgram": 
"H2020", + "projects[0].jurisdiction": "EU", + "projects[0].openaireId": "40|corda__h2020::6e32f5eb912688f2424c68b851483ea4", + "projects[0].title": "Tracking Papyrus and Parchment Paths" + } + }, + + { + "originalId": "oai:www.openstarts.units.it:123456789/99999", + "title": "Test Publication", + "topic": "ENRICH/MISSING/ABSTRACT", + "trust": 1.0, + "message": { + "abstracts[0]": "Missing Abstract" + } + }, + { + "originalId": "oai:www.openstarts.units.it:123456789/99998", + "title": "Egypt, crossroad of translations and literary interweavings", + "topic": "ENRICH/MISSING/PID", + "trust": 1.0, + "message": { + "pids[0].type": "doi", + "pids[0].value": "10.13137/2282-572x/987" + } + }, + { + "originalId": "oai:www.openstarts.units.it:123456789/99999", + "title": "Test Publication", + "topic": "ENRICH/MORE/PID", + "trust": 0.375, + "message": { + "pids[0].type": "doi", + "pids[0].value": "987654" + } + }, + { + "originalId": "oai:www.openstarts.units.it:123456789/99999", + "title": "Test Publication", + "topic": "ENRICH/MISSING/PROJECT", + "trust": 1.0, + "message": { + "projects[0].acronym": "02.SNES missing project acronym", + "projects[0].code": "prjcode_snes", + "projects[0].funder": "02.SNES missing project funder", + "projects[0].fundingProgram": "02.SNES missing project fundingProgram", + "projects[0].jurisdiction": "02.SNES missing project jurisdiction", + "projects[0].title": "Project01" + } + } +] \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/openaire-events/unknown-topic-events.json b/dspace-api/src/test/resources/org/dspace/app/openaire-events/unknown-topic-events.json new file mode 100644 index 000000000000..3caa72cf35b3 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/openaire-events/unknown-topic-events.json @@ -0,0 +1,20 @@ +[ + + { + "originalId": "oai:www.openstarts.units.it:123456789/99998", + "title": "Egypt, crossroad of translations and literary interweavings (3rd-6th centuries). 
A reconsideration of earlier Coptic literature", + "topic": "ENRICH/MORE/UNKNOWN", + "trust": 1.0 + }, + + { + "originalId": "oai:www.openstarts.units.it:123456789/999991", + "title": "Test Publication 2", + "topic": "ENRICH/MISSING/ABSTRACT", + "trust": 1.0, + "message": { + "abstracts[0]": "Missing Abstract" + } + } + +] \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277871.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277871.xml new file mode 100644 index 000000000000..f5fd30fa1359 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277871.xml @@ -0,0 +1,31 @@ + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young woman. + + + formatted-unspecified + Gittinger JW, Miller NR, Keltner JL, Burde RM. Branch artery occlusion in a young woman. Surv Ophthalmol. 1985 Jul-Aug; 30(1):52-8. + + journal-article + + 1985 + 07 + 01 + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277902.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277902.xml new file mode 100644 index 000000000000..aeab7285439c --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277902.xml @@ -0,0 +1,54 @@ + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. 
+ + Journal title + Short description + journal-article + + 2011 + 05 + 01 + + + + Walter White + walter@test.com + + first + author + + + + John White + john@test.com + + additional + author + + + + Jesse Pinkman + + first + editor + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277904.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277904.xml new file mode 100644 index 000000000000..980daa490e63 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/work-277904.xml @@ -0,0 +1,62 @@ + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. + + + bibtex + @article{Test, + doi = {10.11234.12}, + year = 2011, + month = {nov}, + publisher = {Elsevier {BV}}, + volume = {110}, + pages = {71--83}, + author = {Walter White}, + title = {Title from Bibtex: The elements of style and the survey of ophthalmology.}, + journal = {Test Journal} + } + + + invention + + + agr + work:external-identifier-id + http://orcid.org + version-of + + + doi + 10.11234.12 + http://orcid.org + self + + + + + Walter White + walter@test.com + + first + author + + + + it + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902-277871.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902-277871.xml new file mode 100644 index 000000000000..97d39dcf41f2 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902-277871.xml @@ -0,0 +1,147 @@ + + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 
0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. + + + bibtex + @article{Test, + doi = {10.11234.12}, + year = 2011, + month = {nov}, + publisher = {Elsevier {BV}}, + volume = {110}, + pages = {71--83}, + author = {Walter White}, + title = {Title from Bibtex: The elements of style and the survey of ophthalmology.}, + journal = {Test Journal} + } + + + invention + + + agr + work:external-identifier-id + http://orcid.org + version-of + + + doi + 10.11234.12 + http://orcid.org + self + + + + + Walter White + walter@test.com + + first + author + + + + it + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. + + Journal title + Short description + journal-article + + 2011 + 05 + 01 + + + + Walter White + walter@test.com + + first + author + + + + John White + john@test.com + + additional + author + + + + Jesse Pinkman + + first + editor + + + + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young woman. + + + formatted-unspecified + Gittinger JW, Miller NR, Keltner JL, Burde RM. Branch artery occlusion in a young woman. Surv Ophthalmol. 1985 Jul-Aug; 30(1):52-8. 
+ + journal-article + + 1985 + 07 + 01 + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902.xml new file mode 100644 index 000000000000..6c9d0d7db6c8 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/workBulk-277904-277902.xml @@ -0,0 +1,117 @@ + + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. + + + bibtex + @article{Test, + doi = {10.11234.12}, + year = 2011, + month = {nov}, + publisher = {Elsevier {BV}}, + volume = {110}, + pages = {71--83}, + author = {Walter White}, + title = {Title from Bibtex: The elements of style and the survey of ophthalmology.}, + journal = {Test Journal} + } + + + invention + + + agr + work:external-identifier-id + http://orcid.org + version-of + + + doi + 10.11234.12 + http://orcid.org + self + + + + + Walter White + walter@test.com + + first + author + + + + it + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. 
+ + Journal title + Short description + journal-article + + 2011 + 05 + 01 + + + + Walter White + walter@test.com + + first + author + + + + John White + john@test.com + + additional + author + + + + Jesse Pinkman + + first + editor + + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/orcid-works/works.xml b/dspace-api/src/test/resources/org/dspace/app/orcid-works/works.xml new file mode 100644 index 000000000000..411160ef8ece --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/orcid-works/works.xml @@ -0,0 +1,196 @@ + + + 2015-06-19T19:14:26.350Z + + 2015-06-19T19:14:26.350Z + + + 2014-01-22T19:11:57.160Z + 2015-06-19T19:14:26.350Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + The elements of style and the survey of ophthalmology. + + invention + + 2012 + 11 + 01 + + + + + 2015-06-19T19:14:26.339Z + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.339Z + + + https://sandbox.orcid.org/client/DSPACE-CLIENT-ID + DSPACE-CLIENT-ID + sandbox.orcid.org + + DSPACE-CRIS + + + Introduction. + + journal-article + + 2011 + 11 + 01 + + + + + 2015-06-19T19:14:26.327Z + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale. + + journal-article + + 2011 + 05 + 01 + + + + 2014-01-22T19:11:57.159Z + 2015-06-19T19:14:26.327Z + + + https://sandbox.orcid.org/client/4Science + 4Science + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Another cautionary tale (4Science). 
+ + journal-article + + 2011 + 05 + 01 + + + + + 2015-06-19T19:14:26.108Z + + + 2014-01-22T19:11:57.155Z + 2015-06-19T19:14:26.108Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/DSPACE-CLIENT-ID + DSPACE-CLIENT-ID + sandbox.orcid.org + + DSPACE-CRIS + + + Functional hemianopsia: a historical perspective. + + journal-article + + 1988 + 05 + 01 + + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young man. + + journal-article + + 1985 + 07 + 01 + + + + + 2015-06-19T19:14:26.108Z + + + 2014-01-22T19:11:57.151Z + 2015-06-19T19:14:25.924Z + + + https://sandbox.orcid.org/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + + https://sandbox.orcid.org/client/0000-0002-4105-0763 + 0000-0002-4105-0763 + sandbox.orcid.org + + BU Profiles to ORCID Integration Site + + + Branch artery occlusion in a young woman. 
+ + journal-article + + 1985 + 07 + 01 + + + + \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json b/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json new file mode 100644 index 000000000000..3b9e47450238 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json @@ -0,0 +1,3 @@ +{ + "items": [] +} \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json b/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json new file mode 100644 index 000000000000..2e5c7e2db9ca --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json @@ -0,0 +1,504 @@ +{ + "items": [ + { + "system_metadata": { + "id": 40863, + "uri": "https://v2.sherpa.ac.uk/id/publication/40863", + "date_modified": "2022-03-25 14:08:29", + "publicly_visible": "yes", + "publicly_visible_phrases": [ + { + "language": "en", + "phrase": "Yes", + "value": "yes" + } + ], + "date_created": "2022-01-11 09:43:53" + }, + "tj_status_phrases": [ + { + "phrase": "Plan S Approved", + "value": "plan_s_approved", + "language": "en" + } + ], + "type_phrases": [ + { + "value": "journal", + "phrase": "Journal", + "language": "en" + } + ], + "id": 40863, + "issns": [ + { + "issn": "2731-0582" + } + ], + "publishers": [ + { + "relationship_type": "commercial_publisher", + "relationship_type_phrases": [ + { + "value": "commercial_publisher", + "phrase": "Commercial Publisher", + "language": "en" + } + ], + "publisher": { + "id": 3286, + "name": [ + { + "name": "Nature Research", + "language": "en", + "preferred_phrases": [ + { + "language": "en", + "phrase": "Name", + "value": "name" + } + ], + "preferred": "name", + "language_phrases": [ + { + "phrase": "English", + "value": "en", + "language": "en" + } + ] + } + ], + "imprint_of_id": 62037, + "country": "gb", + "country_phrases": [ + { + "value": "gb", + "phrase": "United Kingdom", + 
"language": "en" + } + ], + "publication_count": 87, + "uri": "https://v2.sherpa.ac.uk/id/publisher/3286", + "url": "https://www.nature.com/" + } + } + ], + "listed_in_doaj_phrases": [ + { + "language": "en", + "phrase": "No", + "value": "no" + } + ], + "listed_in_doaj": "no", + "tj_status": [ + "plan_s_approved" + ], + "publisher_policy": [ + { + "open_access_prohibited": "no", + "id": 3286, + "publication_count": 36, + "internal_moniker": "Default Policy", + "urls": [ + { + "description": "Self archiving and license to publish", + "url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish" + }, + { + "description": "Preprints and Conference Proceedings", + "url": "https://www.nature.com/nature-portfolio/editorial-policies/preprints-and-conference-proceedings" + }, + { + "url": "https://www.springernature.com/gp/open-research/policies/accepted-manuscript-terms", + "description": "Accepted manuscript terms of use" + } + ], + "open_access_prohibited_phrases": [ + { + "value": "no", + "phrase": "No", + "language": "en" + } + ], + "uri": "https://v2.sherpa.ac.uk/id/publisher_policy/3286", + "permitted_oa": [ + { + "prerequisites": { + "prerequisites_phrases": [ + { + "language": "en", + "value": "when_research_article", + "phrase": "If a Research Article" + } + ], + "prerequisites": [ + "when_research_article" + ] + }, + "copyright_owner": "authors", + "additional_oa_fee_phrases": [ + { + "language": "en", + "value": "no", + "phrase": "No" + } + ], + "article_version_phrases": [ + { + "language": "en", + "value": "submitted", + "phrase": "Submitted" + } + ], + "additional_oa_fee": "no", + "copyright_owner_phrases": [ + { + "language": "en", + "value": "authors", + "phrase": "Authors" + } + ], + "article_version": [ + "submitted" + ], + "location": { + "location_phrases": [ + { + "value": "authors_homepage", + "phrase": "Author's Homepage", + "language": "en" + }, + { + "language": "en", + "phrase": "Funder Designated Location", + 
"value": "funder_designated_location" + }, + { + "language": "en", + "value": "institutional_repository", + "phrase": "Institutional Repository" + }, + { + "phrase": "Preprint Repository", + "value": "preprint_repository", + "language": "en" + } + ], + "location": [ + "authors_homepage", + "funder_designated_location", + "institutional_repository", + "preprint_repository" + ] + }, + "conditions": [ + "Must link to publisher version", + "Upon publication, source must be acknowledged and DOI cited", + "Post-prints are subject to Springer Nature re-use terms", + "Non-commercial use only" + ] + }, + { + "embargo": { + "units": "months", + "amount": 6, + "units_phrases": [ + { + "phrase": "Months", + "value": "months", + "language": "en" + } + ] + }, + "license": [ + { + "license_phrases": [ + { + "phrase": "Publisher's Bespoke License", + "value": "bespoke_license", + "language": "en" + } + ], + "license": "bespoke_license" + } + ], + "article_version_phrases": [ + { + "value": "accepted", + "phrase": "Accepted", + "language": "en" + } + ], + "additional_oa_fee": "no", + "conditions": [ + "Must link to publisher version", + "Published source must be acknowledged and DOI cited", + "Post-prints are subject to Springer Nature re-use terms", + "Non-commercial use only" + ], + "copyright_owner_phrases": [ + { + "phrase": "Authors", + "value": "authors", + "language": "en" + } + ], + "location": { + "location": [ + "authors_homepage", + "funder_designated_location", + "institutional_repository", + "named_repository" + ], + "location_phrases": [ + { + "phrase": "Author's Homepage", + "value": "authors_homepage", + "language": "en" + }, + { + "phrase": "Funder Designated Location", + "value": "funder_designated_location", + "language": "en" + }, + { + "language": "en", + "value": "institutional_repository", + "phrase": "Institutional Repository" + }, + { + "language": "en", + "value": "named_repository", + "phrase": "Named Repository" + } + ], + "named_repository": [ + "PubMed 
Central", + "Europe PMC" + ] + }, + "article_version": [ + "accepted" + ], + "prerequisites": { + "prerequisites": [ + "when_research_article" + ], + "prerequisites_phrases": [ + { + "value": "when_research_article", + "phrase": "If a Research Article", + "language": "en" + } + ] + }, + "copyright_owner": "authors", + "additional_oa_fee_phrases": [ + { + "language": "en", + "value": "no", + "phrase": "No" + } + ] + } + ] + }, + { + "id": 4410, + "open_access_prohibited": "no", + "urls": [ + { + "url": "https://www.springernature.com/gp/open-research/about/the-fundamentals-of-open-access-and-open-research", + "description": "The fundamentals of open access and open research" + }, + { + "url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish", + "description": "Self archiving and license to publish" + }, + { + "url": "https://www.springernature.com/gp/open-research/policies/journal-policies", + "description": "Open access policies for journals" + } + ], + "open_access_prohibited_phrases": [ + { + "language": "en", + "phrase": "No", + "value": "no" + } + ], + "internal_moniker": "Open Access", + "publication_count": 34, + "permitted_oa": [ + { + "additional_oa_fee_phrases": [ + { + "language": "en", + "phrase": "Yes", + "value": "yes" + } + ], + "copyright_owner": "authors", + "conditions": [ + "Published source must be acknowledged with citation" + ], + "article_version": [ + "published" + ], + "copyright_owner_phrases": [ + { + "language": "en", + "value": "authors", + "phrase": "Authors" + } + ], + "location": { + "location_phrases": [ + { + "phrase": "Any Website", + "value": "any_website", + "language": "en" + }, + { + "language": "en", + "phrase": "Journal Website", + "value": "this_journal" + } + ], + "location": [ + "any_website", + "this_journal" + ] + }, + "additional_oa_fee": "yes", + "article_version_phrases": [ + { + "phrase": "Published", + "value": "published", + "language": "en" + } + ], + "license": [ + { + 
"license_phrases": [ + { + "phrase": "CC BY", + "value": "cc_by", + "language": "en" + } + ], + "license": "cc_by", + "version": "4.0" + } + ], + "publisher_deposit": [ + { + "repository_metadata": { + "type_phrases": [ + { + "language": "en", + "value": "disciplinary", + "phrase": "Disciplinary" + } + ], + "notes": "Launched as UK PubMed Central (UKPMC) in January 2007, changed to Europe PubMed Central in November 2012.\r\nSpecial item types include: Links", + "url": "http://europepmc.org/", + "type": "disciplinary", + "name": [ + { + "name": "Europe PMC", + "language": "en", + "preferred": "name", + "language_phrases": [ + { + "value": "en", + "phrase": "English", + "language": "en" + } + ], + "preferred_phrases": [ + { + "language": "en", + "phrase": "Name", + "value": "name" + } + ] + } + ] + }, + "system_metadata": { + "id": 908, + "uri": "https://v2.sherpa.ac.uk/id/repository/908" + } + }, + { + "system_metadata": { + "id": 267, + "uri": "https://v2.sherpa.ac.uk/id/repository/267" + }, + "repository_metadata": { + "type_phrases": [ + { + "language": "en", + "phrase": "Disciplinary", + "value": "disciplinary" + } + ], + "type": "disciplinary", + "url": "http://www.ncbi.nlm.nih.gov/pmc/", + "name": [ + { + "language": "en", + "name": "PubMed Central", + "preferred": "name", + "language_phrases": [ + { + "language": "en", + "value": "en", + "phrase": "English" + } + ], + "preferred_phrases": [ + { + "language": "en", + "value": "name", + "phrase": "Name" + } + ] + } + ] + } + } + ] + } + ], + "uri": "https://v2.sherpa.ac.uk/id/publisher_policy/4410" + } + ], + "title": [ + { + "preferred_phrases": [ + { + "language": "en", + "phrase": "Title", + "value": "name" + } + ], + "language_phrases": [ + { + "language": "en", + "value": "en", + "phrase": "English" + } + ], + "preferred": "name", + "title": "Nature Synthesis", + "language": "en" + } + ], + "type": "journal", + "url": "https://www.nature.com/natsynth/" + } + ] +} \ No newline at end of file diff --git 
a/dspace-api/src/test/resources/org/dspace/external/openaire-no-projects.xml b/dspace-api/src/test/resources/org/dspace/external/openaire-no-projects.xml new file mode 100644 index 000000000000..315ae63ad12a --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/external/openaire-no-projects.xml @@ -0,0 +1,16 @@ + + +
    + (oaftype exact project) and ( mushroomss) + en_US + 10 + 1 + 0 + + +
    + + + + +
    \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/external/openaire-project.xml b/dspace-api/src/test/resources/org/dspace/external/openaire-project.xml new file mode 100644 index 000000000000..90fc11e39a4b --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/external/openaire-project.xml @@ -0,0 +1,75 @@ + + +
    + (oaftype exact project) and (projectcode_nt exact "110062") and (fundershortname exact "FCT") + en_US + 10 + 1 + 1 + +
    + + +
    + fct_________::59523e9f4736c2cab70a470f088b53dd + 2021-08-06 + 2021-08-06 +
    + + + + + fct_________::110062 + http://www.fct.pt/apoios/projectos/consulta/vglobal_projecto.phtml.en?idProjecto=110062&idElemConcurso=3734 + 110062 + PTDC/AGR-ALI/110062/2009 + Portuguese Wild Mushrooms: Chemical characterization and functional study of antiproliferative and proapoptotic properties in cancer cell lines + 2010-12-24 + 2013-12-23 + PTDC/2009 + Agricultural and Forestry Sciences - Food Science and Technology + 0 + false + false + false + 0.0 + 0.0 + + + fct_________::FCT + FCT + Fundao para a Cincia e a Tecnologia, I.P. + PT + + + fct_________::FCT::5876-PPCDTI + 5876-PPCDTI + 5876-PPCDTI + + fct:program + + + + false + false + 0.900 + null + + + + + + + +
    +
    + +
    \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/external/openaire-projects.xml b/dspace-api/src/test/resources/org/dspace/external/openaire-projects.xml new file mode 100644 index 000000000000..07b4c687b451 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/external/openaire-projects.xml @@ -0,0 +1,637 @@ + + +
    + (oaftype exact project) and ( mushroom) + en_US + 10 + 1 + 77 + +
    + + +
    + rcuk________::98b50e6e0715fad40627833c7030d3c3 + 2018-02-06 + 2021-02-20 +
    + + + + + rcuk________::103679 + 103679 + Mushroom Robo-Pic - Development of an autonomous robotic mushroom picking system + 2017-10-01 + 2019-06-30 + 0 + false + false + false + 0.0 + 0.0 + + + rcuk________::RCUK + UKRI + UK Research and Innovation + GB + + + rcuk________::RCUK::Innovate UK + Innovate UK + Innovate UK + + rcuk:fundingStream + + + + false + false + 0.900 + null + + + + + rcuk________::ebafcf5f45afa2e9807f981e668db66b + Littleport Mushroom Farms Llp + + + + + + + +
    + +
    + rcuk________::6ac77c83ee0d98c433f91f3dc83074b2 + 2017-11-04 + 2021-02-20 +
    + + + + + rcuk________::752540 + 752540 + Exending Shelf Life of Mushroom Growing Kits + 2015-05-01 + 2015-10-31 + 0 + false + false + false + 0.0 + 0.0 + + + rcuk________::RCUK + UKRI + UK Research and Innovation + GB + + + rcuk________::RCUK::Innovate UK + Innovate UK + Innovate UK + + rcuk:fundingStream + + + + false + false + 0.900 + null + + + + + rcuk________::f2a533e22408279c50f647779633cf69 + Espresso Mushroom Company Ltd + + + + + + + +
    + +
    + arc_________::e1da9b244237847b24379fb1b11fb151 + 2015-08-24 + 2018-11-20 +
    + + + + arc_________::LP0220040 + http://purl.org/au-research/grants/arc/LP0220040 + LP0220040 + Use of Organic Residues in Edible Mushroom Production + 2002-01-01 + 2003-12-31 + compost,exotic mushrooms,mushroom production,organic wastes,peat,wood processing wastes + 0 + false + false + false + 0.0 + 0.0 + + + arc_________::ARC + ARC + Australian Research Council (ARC) + AU + + + arc_________::ARC::Linkage Projects + Linkage Projects + Linkage Projects + + arc:fundingStream + + + + false + false + 0.900 + null + + + + + + + +
    + +
    + rcuk________::c137d9bfad46b1ebcc9b3f06e6eb5683 + 2018-08-01 + 2021-02-20 +
    + + + + + rcuk________::133611 + 133611 + The development of a mushroom harvesting machine to increase yield and production while reducing waste and labour shortage risk + 2018-07-01 + 2019-06-30 + 0 + false + false + false + 0.0 + 0.0 + + + rcuk________::RCUK + UKRI + UK Research and Innovation + GB + + + rcuk________::RCUK::Innovate UK + Innovate UK + Innovate UK + + rcuk:fundingStream + + + + false + false + 0.900 + null + + + + + rcuk________::560dfc3b58d1ab0d8a8957a943a76962 + + Mushroom Machine Company Limited + + + + + + +
    + +
    + corda__h2020::c97f7d6f1ff338991c0ec20b33ddb1e0 + 2018-07-21 + 2021-07-19 +
    + + + + + corda__h2020::820352 + 820352 + Smartmushroom + Smart MAnagement of spent mushRoom subsTrate to lead the MUSHROOM sector towards a circular economy + 2018-08-01 + 2021-01-31 + H2020-EIC-FTI-2018-2020 + 0 + false + Fast Track to Innovation (FTI) + true + false + Waste from animal breeding and agriculture, specifically horse and chicken manure and wheat straw, are the raw materials of the growing substrate of mushroom. To grow 1 tonne of mushroom, 3 to 4 tonnes of substrate are needed. However, when mushroom production is completed the substrate cannot be used for another growing cycle due to the depletion of nutrients needed for mushroom growing and it is called Spent Mushroom Substrate (SMS) and becomes a waste that should be managed according to regulations. In Europe, c.a. 3.65 million tons of SMS are generated each year. SMS is a high-moisture content bulk material rich in organic matter and nutrients and it could be reused in agriculture by adding it to the soils as amendment or mulch or weathered to be reused as casing soil. However, nitrates directive set a disposal limit that makes that large quantities of SMS cannot be simply spread in soils next to growers’ facilities, as there is a high risk of leachates and water pollution. Due to its low bulk density and high water content, transportation costs are high and therefore storage is becoming a sound problem. SmartMUSHROOM aims to increase mushroom growers’ waste management efficiency by using a new technology which allow them to obtain enough biogas from fresh SMS to dry a mixture of digestate and additional fresh SMS and pelletize it targeting to obtain a marketable high-quality organic fertilizer rich in organic matter and in nutrients, easy to handle, store and transport to any farming region in Europe. A perfect example of biobased circular economy. 
The aim of the project is to build a pilot plant to demonstrate the technology and find the best commercial formulation for the pellets to enter organic farming market. After the end of project we aim to build at least 18 treatment plants that will place in market 153,000 tonnes of SMS-pellets, generating a total Turnover of 54M€ in the period 2021-2025 and up to 105 related new jobs. + EUR + 2977940.0 + 2264140.0 + + + ec__________::EC + EC + European Commission + EU + + + ec__________::EC::H2020::IA + Innovation action + IA + ec:h2020toas + + + ec__________::EC::H2020 + H2020 + Horizon 2020 Framework Programme + + ec:h2020fundings + + + + + + false + false + 0.900 + null + + + + + pending_org_::39d5641e14f7cfe56e3e836199e33eba + ECOBELIEVE DOO + + ECOBELIEVE DOO GRKINJA + + + pending_org_::3c128a988f1404fed53e1cd8a61df51b + Asociacion Profesional de Productores de Compost y Hongos, de la Rioja, Navarra y Aragón + + ASOCHAMP + + + pending_org_::7555b3c59a033e789ce6190a6c9f39aa + INVESTIGACION Y DESARROLLO CASTILLA Y LEON S.A + + IDECAL S.A. + + + pending_org_::4193497542c7b30e3f50f52414905579 + NOVIS GMBH + + NOVIS GMBH + + + + + + +
    + +
    + nwo_________::c091653b5930a5a11c748720167b04d7 + 2016-06-23 + 2018-08-07 +
    + + + + + nwo_________::2300148817 + 2300148817 + Production of therapeutic proteins in mushroom + 2007-09-01 + 2012-04-30 + 0 + false + false + false + 0.0 + 0.0 + + + nwo_________::NWO + NWO + Netherlands Organisation for Scientific Research (NWO) + NL + + + + false + false + 0.900 + null + + + + + + + +
    + +
    + nwo_________::5ee5a31d8c77215faef3bd35bd9696ff + 2016-06-23 + 2018-08-07 +
    + + + + + nwo_________::2300148209 + 2300148209 + Control of Verticillium fungicola on mushroom + 2006-10-01 + 2012-08-10 + 0 + false + false + false + 0.0 + 0.0 + + + nwo_________::NWO + NWO + Netherlands Organisation for Scientific Research (NWO) + NL + + + + false + false + 0.900 + null + + + + + + + +
    + +
    + nwo_________::98df9c76cbd6537553284af850398659 + 2016-06-23 + 2018-08-07 +
    + + + + + nwo_________::2300147728 + 2300147728 + Master switches of initiation of mushroom formation + 2005-11-01 + 2012-09-12 + 0 + false + false + false + 0.0 + 0.0 + + + nwo_________::NWO + NWO + Netherlands Organisation for Scientific Research (NWO) + NL + + + + false + false + 0.900 + null + + + + + + + +
    + +
    + nwo_________::7ddc7f2f259735f312a27c54f6b2ee5d + 2016-06-23 + 2018-08-07 +
    + + + + + nwo_________::2300164658 + 2300164658 + Push the white button; controlling mushroom formation + 2011-09-01 + 0 + false + false + false + 0.0 + 0.0 + + + nwo_________::NWO + NWO + Netherlands Organisation for Scientific Research (NWO) + NL + + + + false + false + 0.900 + null + + + + + + + +
    + +
    + nsf_________::c5fa29db776dc4f21919e12cbaea37eb + 2016-03-11 + 2018-08-07 +
    + + + + + nsf_________::6112554 + 6112554 + Respiratory Mechanisms in Cultivated Mushroom + 1961-01-01 + 1963-01-01 + 0 + false + false + false + 0.0 + 0.0 + + + nsf_________::NSF + NSF + National Science Foundation + US + + + + false + false + 0.900 + null + + + + + openorgs____::2f4b2e4dcb319a5f66e887d2fd555734 + + University of Delaware + UD + + + + + + +
    +
    + +
    \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 new file mode 100644 index 000000000000..a6649c088643 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jp2 differ diff --git a/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jpg b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jpg new file mode 100644 index 000000000000..7ac530cf24c9 Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/iiif/canvasdimension/cat.jpg differ diff --git a/dspace-api/src/test/resources/test-config.properties b/dspace-api/src/test/resources/test-config.properties index 66a29ab9a09b..06322d4a7e6f 100644 --- a/dspace-api/src/test/resources/test-config.properties +++ b/dspace-api/src/test/resources/test-config.properties @@ -12,4 +12,4 @@ test.folder = ./target/testing/ # Path of the test bitstream (to use in BitstreamTest and elsewhere) test.bitstream = ./target/testing/dspace/assetstore/ConstitutionofIreland.pdf test.exportcsv = ./target/testing/dspace/assetstore/test.csv -test.importcsv = ./target/testing/dspace/assetstore/testImport.csv +test.importcsv = ./target/testing/dspace/assetstore/testImport.csv \ No newline at end of file diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml new file mode 100644 index 000000000000..2f34671139c8 --- /dev/null +++ b/dspace-iiif/pom.xml @@ -0,0 +1,134 @@ + + 4.0.0 + org.dspace + dspace-iiif + jar + DSpace IIIF + + DSpace IIIF Extension + + + + + org.dspace + dspace-parent + 8.0-SNAPSHOT + .. + + + + + ${basedir}/.. 
+ + @ + + + + + + + org.springframework.boot + spring-boot-starter + ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-logging + + + + + + org.springframework.boot + spring-boot-starter-web + ${spring-boot.version} + + + + org.hibernate.validator + hibernate-validator + + + + + org.springframework.boot + spring-boot-starter-data-rest + ${spring-boot.version} + + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + + + + org.springframework.boot + spring-boot-starter-security + ${spring-boot.version} + + + + org.springframework.boot + spring-boot-starter-cache + ${spring-boot.version} + + + javax.cache + cache-api + + + + org.ehcache + ehcache + ${ehcache.version} + + + + + + de.digitalcollections.iiif + iiif-apis + 0.3.10 + + + org.javassist + javassist + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + + org.springframework.security + spring-security-core + + + org.dmfs + iterators + + + com.fasterxml.jackson.module + jackson-module-parameter-names + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + + + + + + org.dspace + dspace-api + + + + + + diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/IIIFController.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/IIIFController.java new file mode 100644 index 000000000000..491a94d5658d --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/IIIFController.java @@ -0,0 +1,114 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif; + +import java.util.UUID; + +import org.dspace.core.Context; +import org.dspace.web.ContextUtil; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.web.bind.annotation.PathVariable; +import 
org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + + +/** + * Controller for IIIF Presentation and Search API. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RestController +@RequestMapping("/iiif") +// Only enable this controller if "iiif.enabled=true" +@ConditionalOnProperty("iiif.enabled") +public class IIIFController { + + @Autowired + IIIFServiceFacade iiifFacade; + + /** + * The manifest response contains sufficient information for the client to initialize + * itself and begin to display something quickly to the user. The manifest resource + * represents a single object and any intellectual work or works embodied within that + * object. In particular it includes the descriptive, rights and linking information + * for the object. It then embeds the sequence(s) of canvases that should be rendered + * to the user. + * + * Called with GET to retrieve the manifest for a single DSpace item. + * + * @param id DSpace Item uuid + * @return manifest as JSON + */ + @RequestMapping(method = RequestMethod.GET, value = "/{id}/manifest") + public String findOne(@PathVariable UUID id) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return iiifFacade.getManifest(context, id); + } + + /** + * Any resource in the Presentation API may have a search service associated with it. + * The resource determines the scope of the content that will be searched. A service + * associated with a manifest will search all of the annotations on canvases or other + * objects below the manifest, a service associated with a particular range will only + * search the canvases within the range, or a service on a canvas will search only + * annotations on that particular canvas. 
The URIs for services associated with different + * resources must be different to allow the client to use the correct one for the desired + * scope of the search. + * + * This endpoint for searches within the manifest scope (by DSpace item uuid). + * + * @param id DSpace Item uuid + * @param query query terms + * @return AnnotationList as JSON + */ + @RequestMapping(method = RequestMethod.GET, value = "/{id}/manifest/search") + public String searchInManifest(@PathVariable UUID id, + @RequestParam(name = "q") String query) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return iiifFacade.searchInManifest(context, id, query); + } + + /** + * All resources can link to semantic descriptions of themselves via the seeAlso property. + * These could be METS, ALTO, full text, or a schema.org descriptions. + * + * Since there's currently no reliable way to associate "seeAlso" links and individual + * canvases (e.g. associate a single image with its ALTO file) the + * scope is the entire manifest (or DSpace Item). + * + * @param id DSpace Item uuid + * @return AnnotationList as JSON + */ + @RequestMapping(method = RequestMethod.GET, value = "/{id}/manifest/seeAlso") + public String findSeeAlsoList(@PathVariable UUID id) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return iiifFacade.getSeeAlsoAnnotations(context, id); + } + + /** + * The canvas represents an individual page or view and acts as a central point for + * laying out the different content resources that make up the display. This information + * should be embedded within a sequence. + * + * This endpoint allows canvases to be dereferenced separately from the manifest. This + * is an atypical use case. 
+ * + * @param id DSpace Item uuid + * @param cid canvas identifier + * @return canvas as JSON + */ + @RequestMapping(method = RequestMethod.GET, value = "/{id}/canvas/{cid}") + public String findCanvas(@PathVariable UUID id, @PathVariable String cid) { + Context context = ContextUtil.obtainCurrentRequestContext(); + return iiifFacade.getCanvas(context, id, cid); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/IIIFServiceFacade.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/IIIFServiceFacade.java new file mode 100644 index 000000000000..7bb723ea652c --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/IIIFServiceFacade.java @@ -0,0 +1,136 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif; + +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.app.iiif.service.AnnotationListService; +import org.dspace.app.iiif.service.CanvasLookupService; +import org.dspace.app.iiif.service.ManifestService; +import org.dspace.app.iiif.service.SearchService; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.Cacheable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.stereotype.Service; + +/** + * IIIF Service facade to support IIIF Presentation and Search API requests. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@Service +public class IIIFServiceFacade { + + @Autowired + ItemService itemService; + + @Autowired + BitstreamService bitstreamService; + + @Autowired + ManifestService manifestService; + + @Autowired + SearchService searchService; + + @Autowired + AnnotationListService annotationListService; + + @Autowired + CanvasLookupService canvasLookupService; + + @Autowired + IIIFUtils utils; + + /** + * The manifest response contains sufficient information for the client to initialize itself + * and begin to display something quickly to the user. The manifest resource represents a single + * object and any intellectual work or works embodied within that object. In particular it + * includes the descriptive, rights and linking information for the object. It then embeds + * the sequence(s) of canvases that should be rendered to the user. + * + * Returns manifest for single DSpace item. + * + * @param id DSpace Item uuid + * @return manifest as JSON + */ + @Cacheable(key = "#id.toString()", cacheNames = "manifests") + @PreAuthorize("hasPermission(#id, 'ITEM', 'READ')") + public String getManifest(Context context, UUID id) + throws ResourceNotFoundException { + Item item; + try { + item = itemService.find(context, id); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + if (item == null || !utils.isIIIFEnabled(item)) { + throw new ResourceNotFoundException("IIIF manifest for id " + id + " not found"); + } + return manifestService.getManifest(item, context); + } + + /** + * The canvas represents an individual page or view and acts as a central point for + * laying out the different content resources that make up the display. This information + * should be embedded within a sequence. 
+ * + * @param id DSpace item uuid + * @param canvasId canvas identifier + * @return canvas as JSON + */ + @PreAuthorize("hasPermission(#id, 'ITEM', 'READ')") + public String getCanvas(Context context, UUID id, String canvasId) + throws ResourceNotFoundException { + Item item; + try { + item = itemService.find(context, id); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + if (item == null) { + throw new ResourceNotFoundException("IIIF canvas for id " + id + " not found"); + } + return canvasLookupService.generateCanvas(context, item, canvasId); + } + + /** + * Returns search hits and word coordinates as an AnnotationList. + * + * Search scope is a single DSpace item or manifest. + * + * @param id DSpace item uuid + * @param query query terms + * @return AnnotationList as JSON + */ + @PreAuthorize("hasPermission(#id, 'ITEM', 'READ')") + public String searchInManifest(Context context, UUID id, String query) { + + return searchService.searchWithinManifest(id, query); + } + + /** + * Returns annotations for machine readable metadata that describes the resource. 
+ * + * @param id the Item uuid + * @return AnnotationList as JSON + */ + @PreAuthorize("hasPermission(#id, 'ITEM', 'READ')") + public String getSeeAlsoAnnotations(Context context, UUID id) { + return annotationListService.getSeeAlsoAnnotations(context, id); + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/exception/NotImplementedException.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/exception/NotImplementedException.java new file mode 100644 index 000000000000..518794bb7be5 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/exception/NotImplementedException.java @@ -0,0 +1,23 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.exception; + +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.ResponseStatus; + +/** + * This exception is used when the search service has not been implemented + * for this server. 
+ */ +@ResponseStatus(value = HttpStatus.NOT_IMPLEMENTED, reason = "Method not implemented") +public class NotImplementedException extends RuntimeException { + + public NotImplementedException(String message) { + super(message); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/ObjectMapperFactory.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/ObjectMapperFactory.java new file mode 100644 index 000000000000..1e5183fd10cd --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/ObjectMapperFactory.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import de.digitalcollections.iiif.model.jackson.IiifModule; +import de.digitalcollections.iiif.model.jackson.IiifObjectMapper; + +public class ObjectMapperFactory { + + private ObjectMapperFactory() {} + + /** + * Gets the jackson ObjectMapper with dbmdz configuration. + * https://github.com/dbmdz/iiif-apis/blob/main/src/main/java/de/digitalcollections/iiif/model/jackson/IiifObjectMapper.java + * @return jackson mapper + */ + public static ObjectMapper getIiifObjectMapper() { + return new IiifObjectMapper(); + } + + /** + * Gets the jackson SimpleModule with dbmdz configuration. 
+ * https://github.com/dbmdz/iiif-apis/blob/main/src/main/java/de/digitalcollections/iiif/model/jackson/IiifModule.java + * @return model + */ + public static SimpleModule getIiifModule() { + return new IiifModule(); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/AnnotationGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/AnnotationGenerator.java new file mode 100644 index 000000000000..3947df35337f --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/AnnotationGenerator.java @@ -0,0 +1,131 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.Motivation; +import de.digitalcollections.iiif.model.openannotation.Annotation; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; + + +/** + * Generator for an {@code annotation} model. Annotations associate content resources and commentary with a canvas. + * This is used for the {@code seeAlso} annotation and Search response. 
+ */ +public class AnnotationGenerator implements IIIFResource { + + public static final String TYPE = "sc:AnnotationList"; + public static final Motivation PAINTING = new Motivation("sc:painting"); + public static final Motivation COMMENTING = new Motivation("oa:commenting"); + public static final Motivation LINKING = new Motivation("oa:linking"); + + private Motivation motivation; + private String identifier; + private CanvasGenerator canvasGenerator; + private ContentAsTextGenerator contentAsTextGenerator; + private ExternalLinksGenerator externalLinksGenerator; + List manifests = new ArrayList<>(); + + + public AnnotationGenerator(@NotNull String identifier) { + if (identifier.isEmpty()) { + throw new RuntimeException("Invalid annotation identifier. Cannot be an empty string."); + } + this.identifier = identifier; + } + + public AnnotationGenerator(@NotNull String identifier, @NotNull Motivation motivation) { + if (identifier.isEmpty()) { + throw new RuntimeException("Invalid annotation identifier. Cannot be an empty string."); + } + this.identifier = identifier; + this.motivation = motivation; + } + + /** + * Sets the motivation field. Required. + * @param motivation the motivation + * @return + */ + public AnnotationGenerator setMotivation(@NotNull Motivation motivation) { + this.motivation = motivation; + return this; + } + + /** + * Sets the canvas that is associated with this annotation. + * @param canvas + * @return + */ + public AnnotationGenerator setOnCanvas(CanvasGenerator canvas) { + this.canvasGenerator = canvas; + return this; + } + + /** + * Sets a text resource for this annotation. + * @param contentAsText + * @return + */ + public AnnotationGenerator setResource(ContentAsTextGenerator contentAsText) { + this.contentAsTextGenerator = contentAsText; + return this; + } + + /** + * Sets an external link for this annotation. 
+ * @param otherContent external link generator + * @return + */ + public AnnotationGenerator setResource(ExternalLinksGenerator otherContent) { + this.externalLinksGenerator = otherContent; + return this; + } + + /** + * Set the within property for this annotation. This property + * is a list of manifests. The property is renamed to partOf in v3 + *

    Used by search result annotations.

    + * @param within + * @return + */ + public AnnotationGenerator setWithin(List within) { + for (ManifestGenerator manifest : within) { + this.manifests.add(manifest.generateResource()); + } + return this; + } + + @Override + public Resource generateResource() { + if (identifier == null) { + throw new RuntimeException("Annotations require an identifier."); + } + Annotation annotation; + if (motivation != null) { + annotation = new Annotation(identifier, motivation); + } else { + annotation = new Annotation(identifier); + } + annotation.setWithin(manifests); + // These optional annotation fields vary with the context. + if (canvasGenerator != null) { + annotation.setOn(canvasGenerator.generateResource()); + } + if (externalLinksGenerator != null) { + annotation.setResource(externalLinksGenerator.generateResource()); + } + if (contentAsTextGenerator != null) { + annotation.setResource(contentAsTextGenerator.generateResource()); + } + return annotation; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/AnnotationListGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/AnnotationListGenerator.java new file mode 100644 index 000000000000..da977a5ccc0d --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/AnnotationListGenerator.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.openannotation.Annotation; +import de.digitalcollections.iiif.model.sharedcanvas.AnnotationList; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import org.springframework.stereotype.Component; +import 
org.springframework.web.context.annotation.RequestScope; + +/** + * This generator wraps the domain model for the {@code AnnotationList}. + * + *

    Please note that this is a request scoped bean. This means that for each http request a + * different instance will be initialized by Spring and used to serve this specific request.

    + * + *

    The model represents an ordered list of annotations.

    + */ +@RequestScope +@Component +public class AnnotationListGenerator implements IIIFResource { + + private String identifier; + private List annotations = new ArrayList<>(); + + /** + * Sets the required annotation identifier. + * @param identifier the annotation identifier + */ + public void setIdentifier(@NotNull String identifier) { + + this.identifier = identifier; + } + + /** + * Adds Annotation resource to the annotation list. + * @param annotation an annotation generator + */ + public void addResource(AnnotationGenerator annotation) { + this.annotations.add((Annotation) annotation.generateResource()); + } + + @Override + public Resource generateResource() { + if (identifier == null) { + throw new RuntimeException("Missing the required identifier for the annotation list."); + } + AnnotationList annotationList = new AnnotationList(identifier); + annotationList.setResources(annotations); + return annotationList; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/BehaviorGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/BehaviorGenerator.java new file mode 100644 index 000000000000..75b5b67d4854 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/BehaviorGenerator.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.enums.ViewingHint; + +/** + * API 2.1.1 ViewingHint is a hint to the client that suggests the appropriate method of + * displaying the resource. + * + * With IIIF Presentation API 3.0 the viewingHint property is renamed to "behavior". 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class BehaviorGenerator implements IIIFValue { + + private String type; + + public BehaviorGenerator setType(String type) { + this.type = type; + return this; + } + + @Override + public ViewingHint generateValue() { + if (type == null) { + throw new RuntimeException("Type must be provided for viewing hint."); + } + return new ViewingHint(type); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/CanvasGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/CanvasGenerator.java new file mode 100644 index 000000000000..f064a1b974ce --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/CanvasGenerator.java @@ -0,0 +1,146 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.ImageContent; +import de.digitalcollections.iiif.model.MetadataEntry; +import de.digitalcollections.iiif.model.sharedcanvas.Canvas; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; + +/** + * This generator wraps the domain model for a single {@code Canvas}. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class CanvasGenerator implements IIIFResource { + + private final String identifier; + private final List metadata = new ArrayList<>(); + private final List images = new ArrayList(); + private String label; + private Integer height; + private Integer width; + private ImageContent thumbnail; + + /** + * Constructor + * @param identifier the canvas identifier + */ + public CanvasGenerator(@NotNull String identifier) { + if (identifier.isEmpty()) { + throw new RuntimeException("Invalid canvas identifier. Cannot be an empty string."); + } + this.identifier = identifier; + } + + public String getIdentifier() { + return identifier; + } + + /** + * Adds a canvas label. + * @param label + */ + public CanvasGenerator setLabel(String label) { + this.label = label; + return this; + } + + /** + * Sets the canvas height. A canvas annotation with motivation {@code sc:painting} must have an pixel height. + * @param height canvas height in pixels + */ + public CanvasGenerator setHeight(int height) { + this.height = height; + return this; + } + + /** + * Sets the canvas width. A canvas annotation with motivation {@code sc:painting} must have a pixel width. + * @param width canvas width in pixels + */ + public CanvasGenerator setWidth(int width) { + this.width = width; + return this; + } + + /** + * Add to the list of image content resources for the canvas. + * @param imageContent image content model + */ + public CanvasGenerator addImage(Resource imageContent) { + images.add((ImageContent) imageContent); + return this; + } + + /** + * Adds the thumbnail resource that will be assigned to the canvas. + * @param thumbnail image content model + */ + public CanvasGenerator addThumbnail(Resource thumbnail) { + this.thumbnail = (ImageContent) thumbnail; + return this; + } + + /** + * Adds single metadata field to Manifest. 
+ * @param field property field + * @param value property value + */ + public void addMetadata(String field, String value, String... rest) { + MetadataEntryGenerator metadataEntryGenerator = new MetadataEntryGenerator(); + metadataEntryGenerator.setField(field); + metadataEntryGenerator.setValue(value, rest); + metadata.add(metadataEntryGenerator.generateValue()); + } + + /** + * Returns the canvas. + * @return canvas model + */ + @Override + public Resource generateResource() { + /** + * The Canvas resource typically includes image content. + */ + Canvas canvas; + if (identifier == null) { + throw new RuntimeException("The Canvas resource requires an identifier."); + } + if (label != null) { + canvas = new Canvas(identifier, label); + } else { + canvas = new Canvas(identifier); + } + if (images.size() > 0) { + if (height == null || width == null) { + throw new RuntimeException("The Canvas resource requires both height and width dimensions."); + } + canvas.setWidth(width); + canvas.setHeight(height); + for (ImageContent res : images) { + canvas.addImage(res); + } + if (thumbnail != null) { + canvas.addThumbnail(thumbnail); + } + } + if (metadata.size() > 0) { + for (MetadataEntry meta : metadata) { + canvas.addMetadata(meta); + } + } + return canvas; + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/CanvasItemsGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/CanvasItemsGenerator.java new file mode 100644 index 000000000000..f9dfaa262484 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/CanvasItemsGenerator.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.util.ArrayList; +import java.util.List; + +import 
de.digitalcollections.iiif.model.sharedcanvas.Canvas; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import de.digitalcollections.iiif.model.sharedcanvas.Sequence; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This generator wraps the domain model for a Presentation API 2.1.1 {@code Sequence}. The IIIF sequence + * conveys the ordering of the views of the object. + * + *

    Please note that this is a request scoped bean. This means that for each http request a + * different instance will be initialized by Spring and used to serve this specific request.

    + * + *

    Sequence is removed with Presentation API version 3.0. Canvases are added to the Manifest items property instead. + *

    + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class CanvasItemsGenerator implements IIIFResource { + + private String identifier; + private final List canvas = new ArrayList<>(); + + /** + * Sets the required identifier property. + * @param identifier URI string + */ + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + + + /** + * Adds a single {@code Canvas} to the sequence. + * @param canvas generator for canvas + */ + public String addCanvas(CanvasGenerator canvas) { + Canvas resource = (Canvas) canvas.generateResource(); + this.canvas.add(resource); + return resource.getIdentifier().toString(); + } + + @Override + public Resource generateResource() { + Sequence items = new Sequence(identifier); + items.setCanvases(canvas); + return items; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ContentAsTextGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ContentAsTextGenerator.java new file mode 100644 index 000000000000..e83c34bfce1d --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ContentAsTextGenerator.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.openannotation.ContentAsText; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; + +/** + * Generator for a text annotation. 
+ */ +@Scope("prototype") +@Component +public class ContentAsTextGenerator implements IIIFResource { + + private String text; + + public void setText(String text) { + this.text = text; + } + + @Override + public Resource generateResource() { + if (text == null) { + throw new RuntimeException("Missing required text for the text annotation."); + } + return new ContentAsText(text); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ContentSearchGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ContentSearchGenerator.java new file mode 100644 index 000000000000..28cc13c07d36 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ContentSearchGenerator.java @@ -0,0 +1,78 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.Profile; +import de.digitalcollections.iiif.model.Service; +import de.digitalcollections.iiif.model.search.ContentSearchService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This generator wraps the search service annotation that is added to + * the manifest for searchable items. Only a single search service is defined + * for the manifest. There should be a single instance of this object per request. + * The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. 
+ */ +@RequestScope +@Component +public class ContentSearchGenerator implements IIIFService { + + private String identifier; + private String label; + + @Autowired + ProfileGenerator profile; + + /** + * Mandatory URI for search service. + * @param identifier + */ + public void setIdentifier(@NotNull String identifier) { + if (identifier.isEmpty()) { + throw new RuntimeException("The search service requires an identifier."); + } + this.identifier = identifier; + } + + /** + * Optional label for the search service. + * @param label the search service label. + */ + public void setLabel(String label) { + this.label = label; + } + + @Override + public Service generateService() { + if (identifier == null) { + throw new RuntimeException("You must provide an identifier for the search service."); + } + ContentSearchService contentSearchService = new ContentSearchService(identifier); + if (label != null) { + contentSearchService.setLabel(label); + } + try { + contentSearchService.setContext(new URI("http://iiif.io/api/search/0/context.json")); + } catch (URISyntaxException e) { + e.printStackTrace(); + } + ArrayList profiles = new ArrayList<>(); + profile.setIdentifier("http://iiif.io/api/search/0/search"); + profiles.add(profile.generateValue()); + contentSearchService.setProfiles(profiles); + return contentSearchService; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ExternalLinksGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ExternalLinksGenerator.java new file mode 100644 index 000000000000..94c18283753c --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ExternalLinksGenerator.java @@ -0,0 +1,84 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import 
javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.OtherContent; +import de.digitalcollections.iiif.model.PropertyValue; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; + +/** + * This generator wraps the other content domain model. + * + * This is the type for related content resources. Used in the "related", "renderings" and + * "seeAlso" fields of IIIF resources. + */ +public class ExternalLinksGenerator implements IIIFResource { + + private final String identifier; + private String format; + private String label; + private String type; + + public ExternalLinksGenerator(@NotNull String identifier) { + if (identifier.isEmpty()) { + throw new RuntimeException("Mandatory external links identifier cannot be an empty string"); + } + this.identifier = identifier; + } + + /** + * Sets the optional format value. + * @param format the mimetype + */ + public ExternalLinksGenerator setFormat(String format) { + this.format = format; + return this; + } + + /** + * Sets the optional label. + * @param label annotation label + */ + public ExternalLinksGenerator setLabel(String label) { + this.label = label; + return this; + } + + /** + * Sets the optional type. 
+ * @param type the annotation type + */ + public ExternalLinksGenerator setType(String type) { + this.type = type; + return this; + } + + @Override + public Resource generateResource() { + if (identifier == null) { + throw new RuntimeException("External links annotation requires an identifier"); + } + OtherContent otherContent; + if (format != null) { + otherContent = new OtherContent(identifier, format); + } else { + otherContent = new OtherContent(identifier); + } + if (label != null) { + otherContent.setLabel(new PropertyValue(label)); + } + if (type != null) { + otherContent.setType(type); + } + + return otherContent; + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFResource.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFResource.java new file mode 100644 index 000000000000..7599349fcb67 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFResource.java @@ -0,0 +1,23 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.sharedcanvas.Resource; + +/** + * Interface for iiif resource generators. + */ +public interface IIIFResource { + + /** + * Creates and returns a resource model. 
+ * @return resource model + */ + Resource generateResource(); + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFService.java new file mode 100644 index 000000000000..33f7b7e1d554 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFService.java @@ -0,0 +1,22 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.Service; + +/** + * Interface for iiif service generators. + */ +public interface IIIFService { + + /** + * Creates and returns a service model + * @return a service model + */ + Service generateService(); +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFValue.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFValue.java new file mode 100644 index 000000000000..dbaf9afcbc0a --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/IIIFValue.java @@ -0,0 +1,20 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +/** + * Interface for iiif value generators. + */ +public interface IIIFValue { + + /** + * creates and returns a value model. + * @return a value model. 
+ */ + Object generateValue(); +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ImageContentGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ImageContentGenerator.java new file mode 100644 index 000000000000..aef979b6353e --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ImageContentGenerator.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.ImageContent; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; + +/** + * This service generator wraps the image content model. + * + * Presentation API version 2.1.1: The ImageContent entity is contained in the "resource" + * field of annotations with motivation "sc:painting". Image resources, and only image resources, + * are included in the image's property of the canvas. This changes in API version 3.0. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class ImageContentGenerator implements IIIFResource { + + private final ImageContent imageContent; + + public ImageContentGenerator(@NotNull String identifier) { + imageContent = new ImageContent(identifier); + } + + /** + * Sets the optional mimetype. + * @param mimetype + */ + public ImageContentGenerator setFormat(String mimetype) { + imageContent.setFormat(mimetype); + return this; + } + + /** + * Adds the IIIF image service annotation. 
+ * @param imageService + */ + public ImageContentGenerator addService(ImageServiceGenerator imageService) { + this.imageContent.addService(imageService.generateService()); + return this; + } + + @Override + public Resource generateResource() { + return imageContent; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ImageServiceGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ImageServiceGenerator.java new file mode 100644 index 000000000000..29b907294960 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ImageServiceGenerator.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.Service; +import de.digitalcollections.iiif.model.image.ImageService; + +/** + * This service generator wraps the image service property model. An image service + * annotation is added to each canvas annotation. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class ImageServiceGenerator implements IIIFService { + + private ImageService imageService; + + public ImageServiceGenerator(String identifier) { + imageService = new ImageService(identifier); + } + + /** + * Sets the IIIF image profile. 
+ * @param profile a profile generator + */ + public ImageServiceGenerator setProfile(ProfileGenerator profile) { + imageService.addProfile(profile.generateValue()); + return this; + } + + @Override + public Service generateService() { + return imageService; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ManifestGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ManifestGenerator.java new file mode 100644 index 000000000000..807269264088 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ManifestGenerator.java @@ -0,0 +1,239 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.ImageContent; +import de.digitalcollections.iiif.model.MetadataEntry; +import de.digitalcollections.iiif.model.OtherContent; +import de.digitalcollections.iiif.model.PropertyValue; +import de.digitalcollections.iiif.model.enums.ViewingHint; +import de.digitalcollections.iiif.model.search.ContentSearchService; +import de.digitalcollections.iiif.model.sharedcanvas.Manifest; +import de.digitalcollections.iiif.model.sharedcanvas.Range; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import de.digitalcollections.iiif.model.sharedcanvas.Sequence; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This generator wraps a domain model for the {@code Manifest}. + *

    + * Please note that this is a request scoped bean. This means that for each http request a + * different instance will be initialized by Spring and used to serve this specific request.

    + *

    + * The Manifest is an overall description of the structure and properties of the digital representation + * of an object. It carries information needed for the viewer to present the digitized content to the user, + * such as a title and other descriptive information about the object or the intellectual work that + * it conveys. Each manifest describes how to present a single object such as a book, a photograph, + * or a statue.

    + * + * Please note that this is a request scoped bean. This means that for each http request a + * different instance will be initialized by Spring and used to serve this specific request. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class ManifestGenerator implements IIIFResource { + + private String identifier; + private String label; + private PropertyValue description; + private ImageContent logo; + private ViewingHint viewingHint; + private Sequence sequence; + private OtherContent seeAlso; + private OtherContent related; + private ImageContent thumbnail; + private ContentSearchService searchService; + private List renderings = new ArrayList<>(); + private final List license = new ArrayList<>(); + private final List metadata = new ArrayList<>(); + private final List ranges = new ArrayList<>(); + + /** + * Sets the mandatory manifest identifier. + * @param identifier manifest identifier + */ + public void setIdentifier(@NotNull String identifier) { + + if (identifier.isEmpty()) { + throw new RuntimeException("Invalid manifest identifier. Cannot be an empty string."); + } + this.identifier = identifier; + } + + /** + * Sets the manifest label. + * @param label manifest label + */ + public void setLabel(String label) { + this.label = label; + } + + public void addLogo(ImageContentGenerator logo) { + this.logo = (ImageContent) logo.generateResource(); + } + + /** + * Sets the viewing hint. In IIIF Presentation API version 3.0 semantics this becomes the "behavior" + * @param viewingHint a viewing hint + */ + public void addViewingHint(String viewingHint) { + BehaviorGenerator hint = new BehaviorGenerator().setType(viewingHint); + this.viewingHint = hint.generateValue(); + } + + /** + * Adds add single (mandatory) {@ode sequence} to the manifest. 
In IIIF Presentation API 3.0 "sequence" + * is replaced by "items" + * @param sequence canvas list model (sequence) + */ + public void addSequence(CanvasItemsGenerator sequence) { + this.sequence = (Sequence) sequence.generateResource(); + } + + /** + * Adds an optional {@code seeAlso} element to Manifest. + * @param seeAlso other content model + */ + public void addSeeAlso(ExternalLinksGenerator seeAlso) { + this.seeAlso = (OtherContent) seeAlso.generateResource(); + } + + /** + * Adds optional thumbnail image resource to manifest. + * @param thumbnail an image content generator + */ + public void addThumbnail(ImageContentGenerator thumbnail) { + this.thumbnail = (ImageContent) thumbnail.generateResource(); + } + + /** + * Adds an optional {@code related} field to the manifest. + * @param related other content generator + */ + public void addRelated(ExternalLinksGenerator related) { + this.related = (OtherContent) related.generateResource(); + } + + /** + * Adds optional search service to the manifest. + * @param searchService search service generator + */ + public void addService(ContentSearchGenerator searchService) { + this.searchService = (ContentSearchService) searchService.generateService(); + } + + /** + * Adds a single metadata field to Manifest. + * @param field property field + * @param value property value + */ + public void addMetadata(String field, String value, String... rest) { + MetadataEntryGenerator meg = new MetadataEntryGenerator().setField(field).setValue(value, rest); + metadata.add(meg.generateValue()); + } + + /** + * Adds an optional license to manifest. + * @param license license terms + */ + public void addLicense(String license) { + this.license.add(URI.create(license)); + } + + /** + * Adds optional description to Manifest. 
+ * @param value the description value + */ + public void addDescription(String value) { + description = new PropertyValueGenerator().getPropertyValue(value).generateValue(); + } + + /** + * Adds optional Range to the manifest's structures element. + * @param rangeGenerator to add + */ + public void addRange(RangeGenerator rangeGenerator) { + ranges.add((Range) rangeGenerator.generateResource()); + } + + /** + * Adds a rendering annotation to the Sequence. The rendering is a link to an external resource intended + * for display or download by a human user. This is typically going to be a PDF file. + * @param otherContent generator for the resource + */ + public void addRendering(ExternalLinksGenerator otherContent) { + this.renderings.add((OtherContent) otherContent.generateResource()); + } + + @Override + public Resource generateResource() { + + if (identifier == null) { + throw new RuntimeException("The Manifest resource requires an identifier."); + } + Manifest manifest; + if (label != null) { + manifest = new Manifest(identifier, label); + } else { + manifest = new Manifest(identifier); + } + if (renderings.size() > 0) { + manifest.setRenderings(renderings); + } + if (logo != null) { + List logos = new ArrayList<>(); + logos.add(logo); + manifest.setLogos(logos); + } + if (sequence != null) { + manifest.addSequence(sequence); + } + if (ranges.size() > 0) { + for (Range range : ranges) { + manifest.addRange(range); + } + } + if (metadata.size() > 0) { + for (MetadataEntry meta : metadata) { + manifest.addMetadata(meta); + } + } + if (seeAlso != null) { + manifest.addSeeAlso(seeAlso); + } + if (related != null) { + manifest.addRelated(related); + } + if (searchService != null) { + manifest.addService(searchService); + } + if (license.size() > 0) { + manifest.setLicenses(license); + } + if (description != null) { + manifest.setDescription(description); + } + if (thumbnail != null) { + manifest.addThumbnail(thumbnail); + } + if (viewingHint != null) { + 
manifest.addViewingHint(viewingHint); + } + return manifest; + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/MetadataEntryGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/MetadataEntryGenerator.java new file mode 100644 index 000000000000..5eb92dfa4d04 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/MetadataEntryGenerator.java @@ -0,0 +1,55 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.MetadataEntry; +import de.digitalcollections.iiif.model.PropertyValue; +import org.dspace.core.I18nUtil; + +/** + * Wraps the domain model metadata property. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class MetadataEntryGenerator implements IIIFValue { + + private String field; + private String value; + private String[] rest; + + /** + * Set metadata field name. + * @param field field name + */ + public MetadataEntryGenerator setField(String field) { + this.field = field; + return this; + } + + /** + * Set metadata value. + * @param value metadata value + */ + public MetadataEntryGenerator setValue(String value, String... rest) { + this.value = value; + this.rest = rest; + return this; + } + + @Override + public MetadataEntry generateValue() { + PropertyValue metadataValues; + if (rest != null && rest.length > 0) { + metadataValues = new PropertyValue(value, rest); + } else { + metadataValues = new PropertyValue(value); + } + return new MetadataEntry(new PropertyValue(I18nUtil.getMessage("metadata." 
+ field)), metadataValues); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ProfileGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ProfileGenerator.java new file mode 100644 index 000000000000..4c497763a66e --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/ProfileGenerator.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.net.URI; +import java.net.URISyntaxException; + +import de.digitalcollections.iiif.model.Profile; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; + +/** + * This class wraps the domain model service profile. + */ +@Scope("prototype") +@Component +public class ProfileGenerator implements IIIFValue { + + private String identifier; + /** + * Input String will be converted to URI for use in the Profile. 
+ * @param identifier URI as string + */ + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + + @Override + public Profile generateValue() { + try { + return new Profile(new URI(identifier)); + } catch (URISyntaxException e) { + throw new RuntimeException(e.getMessage(), e); + } + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/PropertyValueGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/PropertyValueGenerator.java new file mode 100644 index 000000000000..2568d715a3f6 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/PropertyValueGenerator.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import de.digitalcollections.iiif.model.PropertyValue; + +/** + * This class wraps the domain model property value annotation. The property is the type for + * strings that are intended to be displayed to the user. 
+ */ +public class PropertyValueGenerator implements IIIFValue { + + private PropertyValue propertyValue; + + public PropertyValueGenerator getPropertyValue(String val1, String val2) { + propertyValue = new PropertyValue(val1, val2); + return this; + } + + public PropertyValueGenerator getPropertyValue(String val1) { + propertyValue = new PropertyValue(val1); + return this; + } + + @Override + public PropertyValue generateValue() { + return propertyValue; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/RangeGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/RangeGenerator.java new file mode 100644 index 000000000000..fe8acf31c43f --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/RangeGenerator.java @@ -0,0 +1,120 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.util.ArrayList; +import java.util.List; +import javax.validation.constraints.NotNull; + +import de.digitalcollections.iiif.model.enums.ViewingHint; +import de.digitalcollections.iiif.model.sharedcanvas.Canvas; +import de.digitalcollections.iiif.model.sharedcanvas.Range; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import org.dspace.app.iiif.service.RangeService; + +/** + * This generator wraps the domain model for IIIF {@code ranges}. + * + * In Presentation API version 2.1.1, adding a range to the manifest allows the client to display a structured + * hierarchy to enable the user to navigate within the object without merely stepping through the current sequence. + * + * This is used to populate the "structures" element of the Manifest. 
The structure is derived from the iiif.toc + * metadata and the ordered sequence of bitstreams (canvases) + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public class RangeGenerator implements IIIFResource { + + private String identifier; + private String label; + private final List viewingHint = new ArrayList<>(); + private final List canvasList = new ArrayList<>(); + private final List rangesList = new ArrayList<>(); + private final RangeService rangeService; + + /** + * The {@code RangeService} is used for defining hierarchical sub ranges. + * @param rangeService range service + */ + public RangeGenerator(RangeService rangeService) { + this.rangeService = rangeService; + } + + /** + * Sets mandatory range identifier. + * @param identifier range identifier + */ + public RangeGenerator setIdentifier(@NotNull String identifier) { + if (identifier.isEmpty()) { + throw new RuntimeException("Invalid range identifier. Cannot be an empty string."); + } + this.identifier = identifier; + return this; + } + + public String getIdentifier() { + return identifier; + } + + /** + * Sets the optional range label. + * @param label range label + */ + public RangeGenerator setLabel(String label) { + this.label = label; + return this; + } + + public RangeGenerator addViewingHint(String hint) { + viewingHint.add(new BehaviorGenerator().setType(hint).generateValue()); + return this; + } + + /** + * Adds canvas to range canvas list. + * @param canvas list of canvas generators + */ + public RangeGenerator addCanvas(CanvasGenerator canvas) { + canvasList.add((Canvas) canvas.generateResource()); + return this; + } + + /** + * Sets the range identifier and adds a sub range to the ranges list. 
+ * @param range range generator + */ + public void addSubRange(RangeGenerator range) { + range.setIdentifier(identifier + "-" + rangesList.size()); + RangeGenerator rangeReference = rangeService.getRangeReference(range); + rangesList.add((Range) rangeReference.generateResource()); + } + + @Override + public Resource generateResource() { + if (identifier == null) { + throw new RuntimeException("The Range resource requires an identifier."); + } + Range range; + if (label != null) { + range = new Range(identifier, label); + } else { + range = new Range(identifier); + } + if (viewingHint.size() > 0) { + range.setViewingHints(viewingHint); + } + for (Canvas canvas : canvasList) { + range.addCanvas(canvas); + } + for (Range rangeResource : rangesList) { + range.addRange(rangeResource); + } + return range; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/SearchResultGenerator.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/SearchResultGenerator.java new file mode 100644 index 000000000000..f1eac30906e6 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/model/generator/SearchResultGenerator.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.model.generator; + +import java.util.ArrayList; +import java.util.List; + +import de.digitalcollections.iiif.model.openannotation.Annotation; +import de.digitalcollections.iiif.model.search.SearchResult; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This generator wraps a domain model for a {@code SearchResult}. + * + *

    Please note that this is a request scoped bean. This means that for each http request a + * different instance will be initialized by Spring and used to serve this specific request.

    + */ +@RequestScope +@Component +public class SearchResultGenerator implements IIIFResource { + + private String identifier; + private final List annotations = new ArrayList<>(); + + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + + public void addResource(AnnotationGenerator annotation) { + annotations.add((Annotation) annotation.generateResource()); + } + + @Override + public Resource generateResource() { + SearchResult searchResult = new SearchResult(identifier); + searchResult.setResources(annotations); + return searchResult; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/AbstractResourceService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/AbstractResourceService.java new file mode 100644 index 000000000000..ce56a32c1e55 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/AbstractResourceService.java @@ -0,0 +1,93 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.util.UUID; + +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.app.iiif.service.utils.ImageProfileUtil; +import org.dspace.app.iiif.service.utils.ThumbProfileUtil; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Base class for services. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +public abstract class AbstractResourceService { + /** + * These values are defined in dspace configuration. 
+ */ + protected String IIIF_ENDPOINT; + protected String IMAGE_SERVICE; + protected String SEARCH_URL; + protected String CLIENT_URL; + protected String IIIF_LOGO_IMAGE; + protected String BITSTREAM_PATH_PREFIX; + protected int DEFAULT_CANVAS_WIDTH; + protected int DEFAULT_CANVAS_HEIGHT; + /** + * Possible values: "paged" or "individuals". The property + * value is set in dspace configuration. + */ + protected static String DOCUMENT_VIEWING_HINT; + + // Paths for IIIF Image API requests. + protected static final String THUMBNAIL_PATH = "/full/90,/0/default.jpg"; + protected static final String IMAGE_PATH = "/full/full/0/default.jpg"; + // Default canvas dimensions. + protected static Integer defaultCanvasWidthFallback = 2200; + protected static Integer defaultCanvasHeightFallback = 1600; + + @Autowired + IIIFUtils utils; + + @Autowired + ThumbProfileUtil thumbUtil; + + @Autowired + ImageProfileUtil imageUtil; + + ConfigurationService configurationService; + + + /** + * Set constants using DSpace configuration definitions. 
+ * @param configurationService the DSpace configuration service + */ + protected void setConfiguration(ConfigurationService configurationService) { + this.configurationService = configurationService; + IIIF_ENDPOINT = configurationService.getProperty("dspace.server.url") + "/iiif/"; + IMAGE_SERVICE = configurationService.getProperty("iiif.image.server"); + SEARCH_URL = configurationService.getProperty("iiif.search.url"); + BITSTREAM_PATH_PREFIX = configurationService.getProperty("dspace.server.url") + "/api/core/bitstreams"; + DOCUMENT_VIEWING_HINT = configurationService.getProperty("iiif.document.viewing.hint"); + CLIENT_URL = configurationService.getProperty("dspace.ui.url"); + IIIF_LOGO_IMAGE = configurationService.getProperty("iiif.logo.image"); + } + + protected void setDefaultCanvasDimensions() { + DEFAULT_CANVAS_WIDTH = this.configurationService.getIntProperty("iiif.canvas.default-width", + defaultCanvasWidthFallback); + DEFAULT_CANVAS_HEIGHT = this.configurationService.getIntProperty("iiif.canvas.default-height", + defaultCanvasHeightFallback); + } + + /** + * Creates the manifest id from the provided uuid. 
+ * @param uuid the item id + * @return the manifest identifier (url) + */ + protected String getManifestId(UUID uuid) { + return IIIF_ENDPOINT + uuid + "/manifest"; + } + + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/AnnotationListService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/AnnotationListService.java new file mode 100644 index 000000000000..e738b80105f6 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/AnnotationListService.java @@ -0,0 +1,118 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; + +import org.dspace.app.iiif.model.generator.AnnotationGenerator; +import org.dspace.app.iiif.model.generator.AnnotationListGenerator; +import org.dspace.app.iiif.model.generator.ExternalLinksGenerator; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.content.Bitstream; +import org.dspace.content.BitstreamFormat; +import org.dspace.content.Item; +import org.dspace.content.service.BitstreamFormatService; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating an {@code Annotation List}. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class AnnotationListService extends AbstractResourceService { + + + @Autowired + IIIFUtils utils; + + @Autowired + ItemService itemService; + + @Autowired + BitstreamService bitstreamService; + + @Autowired + BitstreamFormatService bitstreamFormatService; + + @Autowired + AnnotationListGenerator annotationList; + + + public AnnotationListService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + /** + * Returns an AnnotationList for bitstreams in the OtherContent bundle. + * These resources are not appended directly to the manifest but can be accessed + * via the seeAlso link. + * + * The semantics of this linking property may be extended to full text files, but + * machine readable formats like ALTO, METS, and schema.org descriptions are preferred. + * + * @param context DSpace context + * @param id bitstream uuid + * @return AnnotationList as JSON + */ + public String getSeeAlsoAnnotations(Context context, UUID id) + throws RuntimeException { + + // We need the DSpace item to proceed + Item item; + try { + item = itemService.find(context, id); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + // AnnotationList requires an identifier. + annotationList.setIdentifier(IIIF_ENDPOINT + id + "/manifest/seeAlso"); + + // Get the "seeAlso" bitstreams for the item. Add + // Annotations for each bitstream found. 
+ List bitstreams = utils.getSeeAlsoBitstreams(item); + for (Bitstream bitstream : bitstreams) { + BitstreamFormat format; + String mimetype; + try { + format = bitstream.getFormat(context); + mimetype = format.getMIMEType(); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + AnnotationGenerator annotation = new AnnotationGenerator(IIIF_ENDPOINT + bitstream.getID()) + .setMotivation(AnnotationGenerator.LINKING) + .setResource(getLinksGenerator(mimetype, bitstream)); + annotationList.addResource(annotation); + } + return utils.asJson(annotationList.generateResource()); + } + + private ExternalLinksGenerator getLinksGenerator(String mimetype, Bitstream bitstream) { + String identifier = BITSTREAM_PATH_PREFIX + + "/" + + bitstream.getID() + + "/content"; + + return new ExternalLinksGenerator(identifier) + .setFormat(mimetype) + .setLabel(bitstream.getName()); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasLookupService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasLookupService.java new file mode 100644 index 000000000000..d9c9478804b7 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasLookupService.java @@ -0,0 +1,62 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.sql.SQLException; + +import org.dspace.app.iiif.model.generator.CanvasGenerator; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.content.Bitstream; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import 
org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating a single {@code Canvas}. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class CanvasLookupService extends AbstractResourceService { + + @Autowired + IIIFUtils utils; + + @Autowired + CanvasService canvasService; + + public CanvasLookupService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + public String generateCanvas(Context context, Item item, String canvasId) { + int canvasPosition = utils.getCanvasId(canvasId); + Bitstream bitstream = utils.getBitstreamForCanvas(context, item, canvasPosition); + if (bitstream == null) { + throw new ResourceNotFoundException(); + } + String mimeType = utils.getBitstreamMimeType(bitstream, context); + CanvasGenerator canvasGenerator; + try { + canvasGenerator = canvasService.getCanvas(context, item.getID().toString(), bitstream, + bitstream.getBundles().get(0), item, canvasPosition, mimeType); + } catch (SQLException e) { + throw new RuntimeException(e); + } + return utils.asJson(canvasGenerator.generateResource()); + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java new file mode 100644 index 000000000000..dcfb707d62a8 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/CanvasService.java @@ -0,0 +1,267 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import static org.dspace.app.iiif.service.utils.IIIFUtils.METADATA_IMAGE_WIDTH; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import org.dspace.app.iiif.model.generator.CanvasGenerator; +import org.dspace.app.iiif.model.generator.ImageContentGenerator; +import org.dspace.app.iiif.service.utils.BitstreamIIIFVirtualMetadata; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating {@code Canvases}. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class CanvasService extends AbstractResourceService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(CanvasService.class); + + @Autowired + ImageContentService imageContentService; + + @Autowired + IIIFUtils utils; + + @Autowired + ApplicationContext applicationContext; + + protected String[] BITSTREAM_METADATA_FIELDS; + + /** + * Used when default dimensions are set to -1 in configuration. + */ + int dynamicDefaultWidth = 0; + int dynamicDefaultHeight = 0; + + + /** + * Constructor. + * + * @param configurationService the DSpace configuration service. + */ + public CanvasService(ConfigurationService configurationService) { + setConfiguration(configurationService); + BITSTREAM_METADATA_FIELDS = configurationService.getArrayProperty("iiif.metadata.bitstream"); + // Set default dimensions in parent class. + setDefaultCanvasDimensions(); + } + + /** + * Checks for "iiif.image.width" metadata in IIIF bundles. When bitstream + * metadata is not found for the first image in the bundle this method updates the + * default canvas dimensions for the request based on the actual image dimensions, + * using the IIIF image service. Called once for each manifest. + * @param bundles IIIF bundles for this item + */ + protected void guessCanvasDimensions(Context context, List bundles) { + // prevent redundant updates. + boolean dimensionUpdated = false; + + for (Bundle bundle : bundles) { + if (!dimensionUpdated) { + for (Bitstream bitstream : bundle.getBitstreams()) { + if (utils.isIIIFBitstream(context, bitstream)) { + // check for width dimension + if (!utils.hasWidthMetadata(bitstream)) { + // get the dimensions of the image. 
+ int[] imageDims = utils.getImageDimensions(bitstream); + if (imageDims != null && imageDims.length == 2) { + // update the fallback dimensions + defaultCanvasWidthFallback = imageDims[0]; + defaultCanvasHeightFallback = imageDims[1]; + } + setDefaultCanvasDimensions(); + // stop processing the bundles + dimensionUpdated = true; + } + // check only the first image + break; + } + } + } + } + } + + /** + * Sets the height and width dimensions for all images when "iiif.image.default-width" + * and "iiif.image.default-height" are set to -1 in DSpace configuration. The values + * are updated only when the bitstream does not have its own image dimension metadata. + * @param bitstream + */ + private void setCanvasDimensions(Bitstream bitstream) { + if (DEFAULT_CANVAS_HEIGHT == -1 && DEFAULT_CANVAS_WIDTH == -1) { + // When the default dimension is -1, update default dimensions when the + // image has no width metadata. + if (bitstream.getMetadata().stream().noneMatch(m -> m.getMetadataField().toString('.') + .contentEquals(METADATA_IMAGE_WIDTH))) { + int[] imageDims = utils.getImageDimensions(bitstream); + if (imageDims != null && imageDims.length == 2) { + // update the dynamic default dimensions for this bitstream + dynamicDefaultWidth = imageDims[0]; + dynamicDefaultHeight = imageDims[1]; + } + if (imageDims == null) { + // use fallback. + dynamicDefaultWidth = defaultCanvasWidthFallback; + dynamicDefaultHeight = defaultCanvasHeightFallback; + log.error("Unable to retrieve dimensions from the image server for: " + bitstream.getID() + + " Using default dimensions."); + } + } + } + } + + /** + * Use the dynamic default if the configured default width is -1. + * @return + */ + private int getDefaultWidth() { + if (DEFAULT_CANVAS_WIDTH == -1) { + return dynamicDefaultWidth; + } + return DEFAULT_CANVAS_WIDTH; + } + + /** + * Use the dynamic default if the configured default height is -1. 
+ * @return + */ + private int getDefaultHeight() { + if (DEFAULT_CANVAS_HEIGHT == -1) { + return dynamicDefaultHeight; + } + return DEFAULT_CANVAS_HEIGHT; + } + + /** + * Creates a single {@code CanvasGenerator}. + * + * @param context DSpace Context + * @param manifestId manifest id + * @param bitstream DSpace bitstream + * @param bundle DSpace bundle + * @param item DSpace item + * @param count the canvas position in the sequence. + * @param mimeType bitstream mimetype + * @return a canvas generator + */ + protected CanvasGenerator getCanvas(Context context, String manifestId, Bitstream bitstream, Bundle bundle, + Item item, int count, String mimeType) { + int pagePosition = count + 1; + + String canvasNaming = utils.getCanvasNaming(item, I18nUtil.getMessage("iiif.canvas.default-naming")); + String label = utils.getIIIFLabel(bitstream, canvasNaming + " " + pagePosition); + + setCanvasDimensions(bitstream); + + int canvasWidth = utils.getCanvasWidth(bitstream, bundle, item, getDefaultWidth()); + int canvasHeight = utils.getCanvasHeight(bitstream, bundle, item, getDefaultHeight()); + UUID bitstreamId = bitstream.getID(); + ImageContentGenerator image = imageContentService.getImageContent(bitstreamId, mimeType, + imageUtil.getImageProfile(), IMAGE_PATH); + + ImageContentGenerator thumb = imageContentService.getImageContent(bitstreamId, mimeType, + thumbUtil.getThumbnailProfile(), THUMBNAIL_PATH); + + return addMetadata(context, bitstream, + new CanvasGenerator(IIIF_ENDPOINT + manifestId + "/canvas/c" + count) + .addImage(image.generateResource()).addThumbnail(thumb.generateResource()).setHeight(canvasHeight) + .setWidth(canvasWidth).setLabel(label)); + } + + /** + * Ranges expect the Canvas object to have only an identifier. 
+ * + * @param startCanvas the start canvas identifier + * @return canvas generator + */ + protected CanvasGenerator getRangeCanvasReference(String startCanvas) { + return new CanvasGenerator(startCanvas); + } + + /** + * Adds metadata to canvas. + * @param context DSpace context + * @param bitstream DSpace bitstream + * @param canvasGenerator canvas generator + * @return canvas generator + */ + private CanvasGenerator addMetadata(Context context, Bitstream bitstream, CanvasGenerator canvasGenerator) { + BitstreamService bService = ContentServiceFactory.getInstance().getBitstreamService(); + for (String field : BITSTREAM_METADATA_FIELDS) { + if (StringUtils.startsWith(field, "@") && StringUtils.endsWith(field, "@")) { + String virtualFieldName = field.substring(1, field.length() - 1); + String beanName = BitstreamIIIFVirtualMetadata.IIIF_BITSTREAM_VIRTUAL_METADATA_BEAN_PREFIX + + virtualFieldName; + BitstreamIIIFVirtualMetadata virtual = applicationContext.getBean(beanName, + BitstreamIIIFVirtualMetadata.class); + List values = virtual.getValues(context, bitstream); + if (values.size() > 0) { + if (values.size() > 1) { + canvasGenerator.addMetadata("bitstream.iiif-virtual." + virtualFieldName, values.get(0), + values.subList(1, values.size()).toArray(new String[values.size() - 1])); + } else { + canvasGenerator.addMetadata("bitstream.iiif-virtual." + virtualFieldName, values.get(0)); + } + } + } else { + String[] eq = field.split("\\."); + String schema = eq[0]; + String element = eq[1]; + String qualifier = null; + if (eq.length > 2) { + qualifier = eq[2]; + } + List metadata = bService.getMetadata(bitstream, schema, element, qualifier, + Item.ANY); + List values = new ArrayList(); + for (MetadataValue meta : metadata) { + if (meta.getValue() != null) { + values.add(meta.getValue()); + } + } + if (values.size() > 0) { + if (values.size() > 1) { + canvasGenerator.addMetadata("bitstream." 
+ field, values.get(0), + values.subList(1, values.size()).toArray(new String[values.size() - 1])); + } else { + canvasGenerator.addMetadata("bitstream." + field, values.get(0)); + } + } + } + } + return canvasGenerator; + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ImageContentService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ImageContentService.java new file mode 100644 index 000000000000..754e8b9bfabc --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ImageContentService.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.util.UUID; + +import org.dspace.app.iiif.model.generator.ImageContentGenerator; +import org.dspace.app.iiif.model.generator.ImageServiceGenerator; +import org.dspace.app.iiif.model.generator.ProfileGenerator; +import org.dspace.services.ConfigurationService; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating a {@code Image Resource} annotation. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class ImageContentService extends AbstractResourceService { + + + public ImageContentService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + /** + * Association of images with their respective canvases is done via annotations. 
The Open Annotation model + * allows any resource to be associated with any other resource, or parts thereof, and it is reused for + * both commentary and painting resources on the canvas. + * @param uuid bitstream uuid + * @param mimetype bitstream mimetype + * @param profile the service profile + * @param path the path component of the identifier + * @return + */ + protected ImageContentGenerator getImageContent(UUID uuid, String mimetype, ProfileGenerator profile, String path) { + return new ImageContentGenerator(IMAGE_SERVICE + uuid + path) + .setFormat(mimetype) + .addService(getImageService(profile, uuid.toString())); + } + + protected ImageContentGenerator getImageContent(String identifier) { + return new ImageContentGenerator(identifier); + } + + /** + * A link to a service that makes more functionality available for the resource, + * like the Image API service. + * + * @param profile service profile + * @param uuid id of the image bitstream + * @return object representing the Image Service + */ + private ImageServiceGenerator getImageService(ProfileGenerator profile, String uuid) { + return new ImageServiceGenerator(IMAGE_SERVICE + uuid).setProfile(profile); + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java new file mode 100644 index 000000000000..09526deeb6cb --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/ManifestService.java @@ -0,0 +1,341 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import 
org.dspace.app.iiif.model.generator.CanvasGenerator; +import org.dspace.app.iiif.model.generator.ContentSearchGenerator; +import org.dspace.app.iiif.model.generator.ExternalLinksGenerator; +import org.dspace.app.iiif.model.generator.ImageContentGenerator; +import org.dspace.app.iiif.model.generator.ManifestGenerator; +import org.dspace.app.iiif.model.generator.RangeGenerator; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.app.util.service.MetadataExposureService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service creates the manifest. There should be a single instance of this service per request. + * The {@code @RequestScope} provides a single instance created and available during complete lifecycle + * of the HTTP request. This is needed because some configurations are cached in the + * instance. Moreover, many injected dependencies are also request scoped or + * prototype (that will turn in a request scope when injected in a request scope + * bean). The generators for top-level domain objects need to be request scoped as they act as a builder + * storing the object state during each incremental building step until the final object is returned (IIIF Resource). 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class ManifestService extends AbstractResourceService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ManifestService.class); + + @Autowired + protected ItemService itemService; + + @Autowired + CanvasService canvasService; + + @Autowired + RangeService rangeService; + + @Autowired + SequenceService sequenceService; + + @Autowired + RelatedService relatedService; + + @Autowired + SeeAlsoService seeAlsoService; + + @Autowired + ImageContentService imageContentService; + + @Autowired + IIIFUtils utils; + + @Autowired + ContentSearchGenerator contentSearchGenerator; + + @Autowired + ManifestGenerator manifestGenerator; + + @Autowired + MetadataExposureService metadataExposureService; + + protected String[] METADATA_FIELDS; + + /** + * Estimate image dimension metadata. + */ + boolean guessCanvasDimension; + + /** + * Constructor. + * @param configurationService the DSpace configuration service. + */ + public ManifestService(ConfigurationService configurationService) { + setConfiguration(configurationService); + METADATA_FIELDS = configurationService.getArrayProperty("iiif.metadata.item"); + } + + /** + * Returns JSON manifest response for a DSpace item. + * + * @param item the DSpace Item + * @param context the DSpace context + * @return manifest as JSON + */ + public String getManifest(Item item, Context context) { + // If default dimensions are provided via configuration do not guess the default dimension. + String wid = configurationService.getProperty("iiif.canvas.default-width"); + String hgt = configurationService.getProperty("iiif.canvas.default-height"); + guessCanvasDimension = (wid == null && hgt == null); + populateManifest(item, context); + return utils.asJson(manifestGenerator.generateResource()); + } + + /** + * Populates the manifest for a DSpace Item. 
+ * + * @param item the DSpace Item + * @param context the DSpace context + * @return manifest domain object + */ + private void populateManifest(Item item, Context context) { + String manifestId = getManifestId(item.getID()); + manifestGenerator.setIdentifier(manifestId); + manifestGenerator.setLabel(item.getName()); + setLogoContainer(); + addRelated(item); + addSearchService(item); + addMetadata(context, item); + addViewingHint(item); + addThumbnail(item, context); + addCanvasAndRange(context, item, manifestId); + manifestGenerator.addSequence( + sequenceService.getSequence(item)); + addRendering(item, context); + addSeeAlso(item); + } + + /** + * Add the ranges to the manifest structure. Ranges are generated from the + * iiif.toc metadata + * + * @param context the DSpace Context + * @param item the DSpace Item to represent + * @param manifestId the generated manifestId + */ + private void addCanvasAndRange(Context context, Item item, String manifestId) { + + // Set the root Range for this manifest. + rangeService.setRootRange(manifestId); + // Get bundles that contain manifest data. + List bundles = utils.getIIIFBundles(item); + // Set the default canvas dimensions. + if (guessCanvasDimension) { + canvasService.guessCanvasDimensions(context, bundles); + } + for (Bundle bnd : bundles) { + String bundleToCPrefix = null; + if (bundles.size() > 1) { + // Check for bundle Range metadata if multiple IIIF bundles exist. + bundleToCPrefix = utils.getBundleIIIFToC(bnd); + } + for (Bitstream bitstream : utils.getIIIFBitstreams(context, bnd)) { + // Add the Canvas to the Sequence. + CanvasGenerator canvas = sequenceService.addCanvas(context, item, bnd, bitstream); + // Update the Ranges. + rangeService.updateRanges(bitstream, bundleToCPrefix, canvas); + } + } + // If Ranges were created, add them to manifest. 
+ Map tocRanges = rangeService.getTocRanges(); + if (tocRanges != null && tocRanges.size() > 0) { + RangeGenerator rootRange = rangeService.getRootRange(); + manifestGenerator.addRange(rootRange); + for (RangeGenerator range : tocRanges.values()) { + manifestGenerator.addRange(range); + } + } + } + + /** + * Adds DSpace Item metadata to the manifest. + * + * @param context the DSpace Context + * @param item the DSpace item + */ + private void addMetadata(Context context, Item item) { + for (String field : METADATA_FIELDS) { + String[] eq = field.split("\\."); + String schema = eq[0]; + String element = eq[1]; + String qualifier = null; + if (eq.length > 2) { + qualifier = eq[2]; + } + List metadata = item.getItemService().getMetadata(item, schema, element, qualifier, + Item.ANY); + List values = new ArrayList(); + for (MetadataValue meta : metadata) { + // we need to perform the check here as the configuration can include jolly + // characters (i.e. dc.description.*) and we need to be sure to hide qualified + // metadata (dc.description.provenance) + try { + if (metadataExposureService.isHidden(context, meta.getMetadataField().getMetadataSchema().getName(), + meta.getMetadataField().getElement(), meta.getMetadataField().getQualifier())) { + continue; + } + } catch (SQLException e) { + throw new RuntimeException(e); + } + values.add(meta.getValue()); + } + if (values.size() > 0) { + if (values.size() > 1) { + manifestGenerator.addMetadata(field, values.get(0), + values.subList(1, values.size()).toArray(new String[values.size() - 1])); + } else { + manifestGenerator.addMetadata(field, values.get(0)); + } + } + } + String descrValue = item.getItemService().getMetadataFirstValue(item, "dc", "description", null, Item.ANY); + if (StringUtils.isNotBlank(descrValue)) { + manifestGenerator.addDescription(descrValue); + } + + String licenseUriValue = item.getItemService().getMetadataFirstValue(item, "dc", "rights", "uri", Item.ANY); + if 
(StringUtils.isNotBlank(licenseUriValue)) { + manifestGenerator.addLicense(licenseUriValue); + } + } + + /** + * Adds a related item property to the manifest. The property provides a link + * to the Item record in the DSpace Angular UI. + * + * @param item the DSpace Item + */ + private void addRelated(Item item) { + manifestGenerator.addRelated(relatedService.getRelated(item)); + } + + /** + * Adds a viewing hint to the manifest. This is a hint to the client as to the most + * appropriate method of displaying the resource. + * + * @param item the DSpace Item + */ + private void addViewingHint(Item item) { + manifestGenerator.addViewingHint(utils.getIIIFViewingHint(item, DOCUMENT_VIEWING_HINT)); + } + + /** + * This method adds into the manifest a {@code seeAlso} reference to additional + * resources found in the Item bundle(s). A typical use case would be METS / ALTO files + * that describe the resource. + * + * @param item the DSpace Item. + */ + private void addSeeAlso(Item item) { + manifestGenerator.addSeeAlso(seeAlsoService.getSeeAlso(item)); + } + + /** + * This method adds a search service definition to the manifest when + * the item metadata includes {@code iiif.search.enabled}. + * + * @param item the DSpace Item + */ + private void addSearchService(Item item) { + if (utils.isSearchable(item)) { + contentSearchGenerator.setIdentifier(IIIF_ENDPOINT + item.getID() + "/manifest/search"); + manifestGenerator.addService(contentSearchGenerator); + } + } + + /** + * Adds thumbnail to the manifest. Uses first image in the manifest. 
+ * @param item the DSpace Item + * @param context DSpace context + */ + private void addThumbnail(Item item, Context context) { + List bitstreams = utils.getIIIFBitstreams(context, item); + if (bitstreams != null && bitstreams.size() > 0) { + String mimeType = utils.getBitstreamMimeType(bitstreams.get(0), context); + ImageContentGenerator image = imageContentService + .getImageContent(bitstreams.get(0).getID(), mimeType, + thumbUtil.getThumbnailProfile(), THUMBNAIL_PATH); + manifestGenerator.addThumbnail(image); + } + } + + /** + * Adds the logo to the manifest when it is defined in DSpace configuration. + */ + private void setLogoContainer() { + if (IIIF_LOGO_IMAGE != null) { + ImageContentGenerator logo = new ImageContentGenerator(IIIF_LOGO_IMAGE); + manifestGenerator.addLogo(logo); + } + } + + /** + * This method looks for a PDF in the Item's ORIGINAL bundle and adds + * it as the Rendering resource if found. + * + * @param item DSpace Item + * @param context DSpace context + */ + private void addRendering(Item item, Context context) { + List bundles = utils.getIIIFBundles(item); + for (Bundle bundle : bundles) { + List bitstreams = bundle.getBitstreams(); + for (Bitstream bitstream : bitstreams) { + String mimeType = null; + try { + mimeType = bitstream.getFormat(context).getMIMEType(); + } catch (SQLException e) { + e.printStackTrace(); + } + // If the bundle contains a PDF, assume that it represents the + // item and add to rendering. Ignore other mime-types. Other options + // might be using the primary bitstream or relying on a bitstream metadata + // field, e.g. 
iiif.rendering + if (mimeType != null && mimeType.contentEquals("application/pdf")) { + String id = BITSTREAM_PATH_PREFIX + "/" + bitstream.getID() + "/content"; + manifestGenerator.addRendering( + new ExternalLinksGenerator(id) + .setLabel(utils.getIIIFLabel(bitstream, bitstream.getName())) + .setFormat(mimeType) + ); + } + } + } + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/RangeService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/RangeService.java new file mode 100644 index 000000000000..a1e85f104e02 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/RangeService.java @@ -0,0 +1,148 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.dspace.app.iiif.model.generator.CanvasGenerator; +import org.dspace.app.iiif.model.generator.RangeGenerator; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.content.Bitstream; +import org.dspace.core.I18nUtil; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating a {@code Range}. There should be a single instance of this service + * per request. The {@code @RequestScope} provides a single instance created and available during complete lifecycle + * of the HTTP request. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class RangeService extends AbstractResourceService { + + @Autowired + CanvasService canvasService; + + private Map tocRanges = new LinkedHashMap(); + private RangeGenerator currentRange; + private RangeGenerator root; + + + public RangeService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + /** + * Get the root range generator. This will contain table of contents entries. + * @return + */ + public RangeGenerator getRootRange() { + return root; + } + + /** + * Sets the root range generator to which sub-ranges will be added. + * @param manifestId id of the manifest to which ranges will be added. + */ + public void setRootRange(String manifestId) { + root = new RangeGenerator(this); + root.addViewingHint("top"); + root.setLabel(I18nUtil.getMessage("iiif.toc.root-label")); + root.setIdentifier(manifestId + "/range/r0"); + } + + /** + * Gets the current ranges. + * @return map of toc ranges. + */ + public Map getTocRanges() { + return this.tocRanges; + } + + /** + * Updates the current range and adds sub-ranges. + * @param bitstream bitstream DSO + * @param bundleToCPrefix range prefix from bundle metadata + * @param canvas the current canvas generator + */ + public void updateRanges(Bitstream bitstream, String bundleToCPrefix, CanvasGenerator canvas) { + List tocs = utils.getIIIFToCs(bitstream, bundleToCPrefix); + if (tocs.size() > 0) { + // Add a new Range. + addTocRange(tocs, canvas); + } else { + // Add canvases to the current Range. + if (tocRanges.size() > 0) { + String canvasIdentifier = canvas.getIdentifier(); + CanvasGenerator simpleCanvas = canvasService.getRangeCanvasReference(canvasIdentifier); + currentRange.addCanvas(simpleCanvas); + } + } + } + + /** + * Adds sub-ranges to the root Range. 
If the toc metadata includes a separator, + * hierarchical sub-ranges are created. + * @param tocs ranges from toc metadata + * @param canvasGenerator generator for the current canvas + * @return + */ + private void addTocRange(List tocs , CanvasGenerator canvasGenerator) { + + for (String toc : tocs) { + // Make tempRange a reference to root. + RangeGenerator tempRange = root; + String[] parts = toc.split(IIIFUtils.TOC_SEPARATOR_REGEX); + String key = ""; + // Process sub-ranges. + for (int pIdx = 0; pIdx < parts.length; pIdx++) { + if (pIdx > 0) { + key += IIIFUtils.TOC_SEPARATOR; + } + key += parts[pIdx]; + if (tocRanges.get(key) != null) { + // Handles the case of a bitstream that crosses two ranges. + tempRange = tocRanges.get(key); + } else { + RangeGenerator range = new RangeGenerator(this); + range.setLabel(parts[pIdx]); + // Add sub-range to the root Range + tempRange.addSubRange(range); + // Add new sub-range to the map. + tocRanges.put(key, range); + // Make tempRange a reference to the new sub-range. + tempRange = range; + } + } + // Add a simple canvas reference to the Range. + tempRange + .addCanvas(canvasService.getRangeCanvasReference(canvasGenerator.getIdentifier())); + + // Update the current Range. + currentRange = tempRange; + } + } + + /** + * Ranges expect the sub-range to have only an identifier. 
+ * + * @param range the sub-range to reference + * @return RangeGenerator able to create the reference + */ + public RangeGenerator getRangeReference(RangeGenerator range) { + return new RangeGenerator(this).setIdentifier(range.getIdentifier()); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/RelatedService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/RelatedService.java new file mode 100644 index 000000000000..a29c6466b62b --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/RelatedService.java @@ -0,0 +1,40 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import org.dspace.app.iiif.model.generator.ExternalLinksGenerator; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating a {@code related} annotation. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class RelatedService extends AbstractResourceService { + + private static final String RELATED_ITEM_LABEL = "DSpace item view"; + + public RelatedService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + public ExternalLinksGenerator getRelated(Item item) { + String url = CLIENT_URL + "/items/" + item.getID(); + return new ExternalLinksGenerator(url) + .setFormat("text/html") + .setLabel(RELATED_ITEM_LABEL); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SearchAnnotationService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SearchAnnotationService.java new file mode 100644 index 000000000000..15f0879c5f49 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SearchAnnotationService.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.util.UUID; + +/** + * Interface for IIIF Search API implementations. + */ +public interface SearchAnnotationService { + + /** + * Initializes required values. + * + * @param endpoint the iiif service endpoint + * @param manifestId the id of the manifest to search within + */ + void initializeQuerySettings(String endpoint, String manifestId); + + /** + * Executes the Search API solr query and returns iiif search result + * annotations. + * + * @param query encoded query terms + * @return iiif json response + */ + String getSearchResponse(UUID uuid, String query); + + /** + * Tests to see if the plugin is configured in iiif.cfg. 
+ * + * @param className the canonical name of class + * @return true if provided value matches plugin class name + */ + boolean useSearchPlugin(String className); + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SearchService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SearchService.java new file mode 100644 index 000000000000..be039477b5f4 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SearchService.java @@ -0,0 +1,64 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.util.List; +import java.util.UUID; + +import org.apache.logging.log4j.Logger; +import org.dspace.app.iiif.exception.NotImplementedException; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating {@code Search API} response. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. + */ +@RequestScope +@Component +public class SearchService extends AbstractResourceService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SearchService.class); + + private final String searchPlugin; + + @Autowired + List annotationService; + + public SearchService(ConfigurationService configurationService) { + setConfiguration(configurationService); + // The search service to use is defined in dspace configuration. 
+ searchPlugin = configurationService.getProperty("iiif.search.plugin"); + } + + /** + * Executes a search query for items in the current manifest. A + * search plugin must be enabled. + * + * @param uuid dspace item uuid + * @param query the solr query + * @return IIIF search result with page coordinate annotations. + */ + public String searchWithinManifest(UUID uuid, String query) throws NotImplementedException { + if (searchPlugin != null) { + for (SearchAnnotationService service : annotationService) { + if (service.useSearchPlugin(searchPlugin)) { + service.initializeQuerySettings(IIIF_ENDPOINT, getManifestId(uuid)); + return service.getSearchResponse(uuid, query); + } + } + } + throw new NotImplementedException( + "The IIIF search option is not enabled for this server." + ); + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SeeAlsoService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SeeAlsoService.java new file mode 100644 index 000000000000..f4bd8c0348b2 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SeeAlsoService.java @@ -0,0 +1,41 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import org.dspace.app.iiif.model.generator.AnnotationGenerator; +import org.dspace.app.iiif.model.generator.ExternalLinksGenerator; +import org.dspace.content.Item; +import org.dspace.services.ConfigurationService; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating {@code seAlso} external link. There should be a single instance of + * this service per request. The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. 
+ * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class SeeAlsoService extends AbstractResourceService { + + private static final String SEE_ALSO_LABEL = "More descriptions of this resource"; + + public SeeAlsoService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + public ExternalLinksGenerator getSeeAlso(Item item) { + return new ExternalLinksGenerator(IIIF_ENDPOINT + item.getID() + "/manifest/seeAlso") + .setType(AnnotationGenerator.TYPE) + .setLabel(SEE_ALSO_LABEL); + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SequenceService.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SequenceService.java new file mode 100644 index 000000000000..7914ae11009d --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/SequenceService.java @@ -0,0 +1,85 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import org.apache.logging.log4j.Logger; +import org.dspace.app.iiif.model.generator.CanvasGenerator; +import org.dspace.app.iiif.model.generator.CanvasItemsGenerator; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.web.context.annotation.RequestScope; + +/** + * This service provides methods for creating a {@code Sequence}. There should be a single instance of + * this service per request. 
The {@code @RequestScope} provides a single instance created and available during + * complete lifecycle of the HTTP request. + * + * @author Michael Spalti mspalti@willamette.edu + * @author Andrea Bollini (andrea.bollini at 4science.it) + */ +@RequestScope +@Component +public class SequenceService extends AbstractResourceService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SequenceService.class); + + /* + * The counter tracks the position of the bitstream in the list and is used to create the canvas identifier. + * The order of bitstreams (and thus page order in documents) is determined by position in the DSpace + * bundle. + */ + int counter = 0; + + @Autowired + CanvasItemsGenerator sequenceGenerator; + + @Autowired + CanvasService canvasService; + + public SequenceService(ConfigurationService configurationService) { + setConfiguration(configurationService); + } + + /** + * Returns a sequence generator that has been configured with canvases. (@abollini will update.) + * + * @param item the DSpace item + * @return a sequence generator + */ + public CanvasItemsGenerator getSequence(Item item) { + + sequenceGenerator.setIdentifier(IIIF_ENDPOINT + item.getID() + "/sequence/s0"); + return sequenceGenerator; + } + + /** + * This method adds a canvas to the sequence for each item in the list of DSpace bitstreams. + * Bitstreams must be on image mime type. (@abollini will update.) 
+ * + * @param context the DSpace context + * @param item the DSpace Item + * @param bnd a DSpace bundle + * @param bitstream a DSpace bitstream + */ + public CanvasGenerator addCanvas(Context context, Item item, Bundle bnd, Bitstream bitstream) { + String mimeType = utils.getBitstreamMimeType(bitstream, context); + String manifestId = item.getID().toString(); + CanvasGenerator canvasGenerator = + canvasService.getCanvas(context, manifestId, bitstream, bnd, item, counter, mimeType); + sequenceGenerator.addCanvas(canvasGenerator); + counter++; + return canvasGenerator; + } + + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java new file mode 100644 index 000000000000..9e6022548dbe --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java @@ -0,0 +1,294 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service; + +import java.io.IOException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.validator.routines.UrlValidator; +import org.apache.logging.log4j.Logger; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.NoOpResponseParser; +import org.apache.solr.client.solrj.request.QueryRequest; +import org.apache.solr.common.params.CommonParams; +import 
org.apache.solr.common.util.NamedList; +import org.dspace.app.iiif.model.generator.AnnotationGenerator; +import org.dspace.app.iiif.model.generator.CanvasGenerator; +import org.dspace.app.iiif.model.generator.ContentAsTextGenerator; +import org.dspace.app.iiif.model.generator.ManifestGenerator; +import org.dspace.app.iiif.model.generator.SearchResultGenerator; +import org.dspace.app.iiif.service.utils.IIIFUtils; +import org.dspace.services.ConfigurationService; +import org.dspace.services.factory.DSpaceServicesFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Scope; +import org.springframework.stereotype.Component; + + +/** + * This service implements methods for executing a solr search and creating IIIF search result annotations. + *

    + * https://github.com/dbmdz/solr-ocrhighlighting + */ +@Scope("prototype") +@Component +public class WordHighlightSolrSearch implements SearchAnnotationService { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(WordHighlightSolrSearch.class); + + private String endpoint; + private String manifestId; + + @Autowired + IIIFUtils utils; + + @Autowired + ContentAsTextGenerator contentAsText; + + @Autowired + SearchResultGenerator searchResult; + + @Autowired + ManifestGenerator manifestGenerator; + + + @Override + public boolean useSearchPlugin(String className) { + return className.contentEquals(WordHighlightSolrSearch.class.getCanonicalName()); + } + + @Override + public void initializeQuerySettings(String endpoint, String manifestId) { + this.endpoint = endpoint; + this.manifestId = manifestId; + } + + @Override + public String getSearchResponse(UUID uuid, String query) { + String json = ""; + ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + String solrService = configurationService.getProperty("iiif.search.url"); + boolean validationEnabled = configurationService + .getBooleanProperty("discovery.solr.url.validation.enabled"); + UrlValidator urlValidator = new UrlValidator(UrlValidator.ALLOW_LOCAL_URLS); + if (urlValidator.isValid(solrService) || validationEnabled) { + HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build(); + solrServer.setUseMultiPartPost(true); + SolrQuery solrQuery = getSolrQuery(adjustQuery(query), manifestId); + QueryRequest req = new QueryRequest(solrQuery); + // returns raw json response. 
+ req.setResponseParser(new NoOpResponseParser("json")); + NamedList resp; + try { + resp = solrServer.request(req); + json = (String) resp.get("response"); + } catch (SolrServerException | IOException e) { + throw new RuntimeException("Unable to retrieve search response.", e); + } + } else { + log.error("Error while initializing solr, invalid url: " + solrService); + } + return getAnnotationList(uuid, json, query); + } + + /** + * Wraps multi-word queries in parens. + * @param query the search query + * @return + */ + private String adjustQuery(String query) { + if (query.split(" ").length > 1) { + return '(' + query + ')'; + } + return query; + } + + /** + * Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2. + * https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2 + * + * @param query the search terms + * @param manifestId the id of the manifest in which to search + * @return solr query + */ + private SolrQuery getSolrQuery(String query, String manifestId) { + SolrQuery solrQuery = new SolrQuery(); + solrQuery.set("q", "ocr_text:" + query + " AND manifest_url:\"" + manifestId + "\""); + solrQuery.set(CommonParams.WT, "json"); + solrQuery.set("hl", "true"); + solrQuery.set("hl.ocr.fl", "ocr_text"); + solrQuery.set("hl.ocr.contextBlock", "line"); + solrQuery.set("hl.ocr.contextSize", "2"); + solrQuery.set("hl.snippets", "8192"); + solrQuery.set("hl.ocr.maxPassages", "8192"); + solrQuery.set("hl.ocr.trackPages", "on"); + solrQuery.set("hl.ocr.limitBlock","page"); + solrQuery.set("hl.ocr.absoluteHighlights", "true"); + + return solrQuery; + } + + /** + * Generates a Search API response from the word_highlighting solr query response. + * + * The function assumes that the solr query responses contains page IDs + * (taken from the ALTO Page ID element) in the following format: + * Page.0, Page.1, Page.2.... + * + * The identifier values must be aligned with zero-based IIIF canvas identifiers: + * c0, c1, c2.... 
+ * + * This convention must be followed when indexing ALTO files into the word_highlighting + * solr index. If it is not followed, word highlights will not align canvases. + * + * @param json solr search result + * @param query the solr query + * @return a search response in JSON + */ + private String getAnnotationList(UUID uuid, String json, String query) { + searchResult.setIdentifier(manifestId + "/search?q=" + + URLEncoder.encode(query, StandardCharsets.UTF_8)); + + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + // If error occurred or no body, return immediately + if (body == null) { + return utils.asJson(searchResult.generateResource()); + } + + // Example structure of Solr response available at + // https://github.com/dbmdz/solr-ocrhighlighting/blob/main/docs/query.md + // Get the outer ocrHighlighting node + JsonNode highs = body.get("ocrHighlighting"); + if (highs != null) { + // Loop through each highlight entry under ocrHighlighting + for (final JsonNode highEntry : highs) { + // Get the ocr_text node under the entry + JsonNode ocrNode = highEntry.get("ocr_text"); + if (ocrNode != null) { + // Loop through the snippets array under that + for (final JsonNode snippet : ocrNode.get("snippets")) { + if (snippet != null) { + // Get a canvas ID based on snippet's pages + String pageId = getCanvasId(snippet.get("pages")); + if (pageId != null) { + // Loop through array of highlights for each snippet. + for (final JsonNode highlights : snippet.get("highlights")) { + if (highlights != null) { + // May be multiple word highlights on a page, so loop through them. 
+ for (int i = 0; i < highlights.size(); i++) { + // Add annotation associated with each highlight + AnnotationGenerator anno = getAnnotation(highlights.get(i), pageId, uuid); + if (anno != null) { + searchResult.addResource(anno); + } + } + } + } + } + } + } + } + } + } + + return utils.asJson(searchResult.generateResource()); + } + + /** + * Returns the annotation generator for the highlight. + * @param highlight highlight node from Solr response + * @param pageId page id from solr response + * @return generator for a single annotation + */ + private AnnotationGenerator getAnnotation(JsonNode highlight, String pageId, UUID uuid) { + String text = highlight.get("text") != null ? highlight.get("text").asText() : null; + int ulx = highlight.get("ulx") != null ? highlight.get("ulx").asInt() : -1; + int uly = highlight.get("uly") != null ? highlight.get("uly").asInt() : -1; + int lrx = highlight.get("lrx") != null ? highlight.get("lrx").asInt() : -1; + int lry = highlight.get("lry") != null ? highlight.get("lry").asInt() : -1; + String w = (lrx >= 0 && ulx >= 0) ? Integer.toString(lrx - ulx) : null; + String h = (lry >= 0 && uly >= 0) ? Integer.toString(lry - uly) : null; + + if (text != null && w != null && h != null) { + String params = ulx + "," + uly + "," + w + "," + h; + return createSearchResultAnnotation(params, text, pageId, uuid); + } + return null; + } + + /** + * Returns position of canvas. Uses the "pages" id attribute. + * This method assumes that the solr response includes a "page" id attribute that is + * delimited with a "." and that the integer corresponds to the + * canvas identifier in the manifest. For METS/ALTO documents, the page + * order can be derived from the METS file when loading the solr index. 
+ * @param pagesNode the pages node + * @return canvas id or null if node was null + */ + private String getCanvasId(JsonNode pagesNode) { + if (pagesNode != null) { + JsonNode page = pagesNode.get(0); + if (page != null) { + JsonNode pageId = page.get("id"); + if (pageId != null) { + String[] identArr = pageId.asText().split("\\."); + // the canvas id. + return "c" + identArr[1]; + } + } + } + return null; + } + + /** + * Creates annotation with word highlight coordinates. + * + * @param params word coordinate parameters used for highlighting. + * @param text word text + * @param pageId the page id returned by solr + * @param uuid the dspace item identifier + * @return a single annotation object that contains word highlights on a single page (canvas) + */ + private AnnotationGenerator createSearchResultAnnotation(String params, String text, String pageId, UUID uuid) { + String annotationIdentifier = this.endpoint + uuid + "/annot/" + pageId + "-" + params; + String canvasIdentifier = this.endpoint + uuid + "/canvas/" + pageId + "#xywh=" + params; + contentAsText.setText(text); + CanvasGenerator canvas = new CanvasGenerator(canvasIdentifier); + + AnnotationGenerator annotationGenerator = new AnnotationGenerator(annotationIdentifier, + AnnotationGenerator.PAINTING) + .setOnCanvas(canvas) + .setResource(contentAsText) + .setWithin(getWithinManifest()); + + return annotationGenerator; + } + + private List getWithinManifest() { + List withinList = new ArrayList<>(); + manifestGenerator.setIdentifier(manifestId); + withinList.add(manifestGenerator); + return withinList; + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamBytesIIIFVirtualMetadata.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamBytesIIIFVirtualMetadata.java new file mode 100644 index 000000000000..e042ef3e6b23 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamBytesIIIFVirtualMetadata.java @@ -0,0 
+1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import java.util.Collections; +import java.util.List; + +import org.apache.commons.io.FileUtils; +import org.dspace.content.Bitstream; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Expose the Bitstream file size as a IIIF Metadata + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +@Component(BitstreamIIIFVirtualMetadata.IIIF_BITSTREAM_VIRTUAL_METADATA_BEAN_PREFIX + "bytes") +public class BitstreamBytesIIIFVirtualMetadata implements BitstreamIIIFVirtualMetadata { + + @Override + public List getValues(Context context, Bitstream bitstream) { + return Collections.singletonList(FileUtils.byteCountToDisplaySize(bitstream.getSizeBytes())); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamChecksumIIIFVirtualMetadata.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamChecksumIIIFVirtualMetadata.java new file mode 100644 index 000000000000..d7920bdc4b23 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamChecksumIIIFVirtualMetadata.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import java.util.Collections; +import java.util.List; + +import org.dspace.content.Bitstream; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Expose the Bitstream Checksum as a IIIF Metadata + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ 
+@Component(BitstreamIIIFVirtualMetadata.IIIF_BITSTREAM_VIRTUAL_METADATA_BEAN_PREFIX + "checksum") +public class BitstreamChecksumIIIFVirtualMetadata implements BitstreamIIIFVirtualMetadata { + + @Override + public List getValues(Context context, Bitstream bitstream) { + return Collections.singletonList(bitstream.getChecksum() + " (" + bitstream.getChecksumAlgorithm() + ")"); + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamFormatIIIFVirtualMetadata.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamFormatIIIFVirtualMetadata.java new file mode 100644 index 000000000000..c4caf0c54e60 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamFormatIIIFVirtualMetadata.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; + +import org.dspace.content.Bitstream; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Expose the Bitstream Format as a IIIF Metadata + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +@Component(BitstreamIIIFVirtualMetadata.IIIF_BITSTREAM_VIRTUAL_METADATA_BEAN_PREFIX + "format") +public class BitstreamFormatIIIFVirtualMetadata implements BitstreamIIIFVirtualMetadata { + + @Override + public List getValues(Context context, Bitstream bitstream) { + try { + return Collections.singletonList(bitstream.getFormatDescription(context)); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamIIIFVirtualMetadata.java 
b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamIIIFVirtualMetadata.java new file mode 100644 index 000000000000..46fee60c87fa --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamIIIFVirtualMetadata.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import java.util.List; + +import org.dspace.content.Bitstream; +import org.dspace.core.Context; + +/** + * Interface to implement to expose additional information at the canvas level + * for the bitstream + * + * @author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +public interface BitstreamIIIFVirtualMetadata { + public final String IIIF_BITSTREAM_VIRTUAL_METADATA_BEAN_PREFIX = "iiif.bitstream."; + + List getValues(Context context, Bitstream bitstream); +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamMimetypeIIIFVirtualMetadata.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamMimetypeIIIFVirtualMetadata.java new file mode 100644 index 000000000000..b49b29743ffd --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/BitstreamMimetypeIIIFVirtualMetadata.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import java.sql.SQLException; +import java.util.Collections; +import java.util.List; + +import org.dspace.content.Bitstream; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Expose the Bitstream format mime type as a IIIF Metadata + * + * 
@author Andrea Bollini (andrea.bollini at 4science.it) + * + */ +@Component(BitstreamIIIFVirtualMetadata.IIIF_BITSTREAM_VIRTUAL_METADATA_BEAN_PREFIX + "mimetype") +public class BitstreamMimetypeIIIFVirtualMetadata implements BitstreamIIIFVirtualMetadata { + + @Override + public List getValues(Context context, Bitstream bitstream) { + try { + return Collections.singletonList(bitstream.getFormat(context).getMIMEType()); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java new file mode 100644 index 000000000000..782a5a985292 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/IIIFUtils.java @@ -0,0 +1,460 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_HEIGHT_QUALIFIER; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_IMAGE_ELEMENT; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_SCHEMA; +import static org.dspace.iiif.util.IIIFSharedUtils.METADATA_IIIF_WIDTH_QUALIFIER; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import de.digitalcollections.iiif.model.sharedcanvas.Resource; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Logger; +import 
org.dspace.app.iiif.model.ObjectMapperFactory; +import org.dspace.content.Bitstream; +import org.dspace.content.BitstreamFormat; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Context; +import org.dspace.iiif.IIIFApiQueryService; +import org.dspace.iiif.util.IIIFSharedUtils; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.Cacheable; +import org.springframework.stereotype.Component; + +@Component +public class IIIFUtils { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(IIIFUtils.class); + + // The DSpace bundle for other content related to item. + protected static final String OTHER_CONTENT_BUNDLE = "OtherContent"; + + // The canvas position will be appended to this string. 
+ private static final String CANVAS_PATH_BASE = "/canvas/c"; + + // metadata used to enable the iiif features on the item + public static final String METADATA_IIIF_ENABLED = "dspace.iiif.enabled"; + // metadata used to enable the iiif search service on the item + public static final String METADATA_IIIF_SEARCH_ENABLED = "iiif.search.enabled"; + // metadata used to override the title/name exposed as label to iiif client + public static final String METADATA_IIIF_LABEL = "iiif.label"; + // metadata used to override the description/abstract exposed as label to iiif client + public static final String METADATA_IIIF_DESCRIPTION = "iiif.description"; + // metadata used to set the position of the resource in the iiif manifest structure + public static final String METADATA_IIIF_TOC = "iiif.toc"; + // metadata used to set the naming convention (prefix) used for all canvas that has not an explicit name + public static final String METADATA_IIIF_CANVAS_NAMING = "iiif.canvas.naming"; + // metadata used to set the iiif viewing hint + public static final String METADATA_IIIF_VIEWING_HINT = "iiif.viewing.hint"; + // metadata used to set the width of the canvas that has not an explicit name + public static final String METADATA_IMAGE_WIDTH = METADATA_IIIF_SCHEMA + "." + METADATA_IIIF_IMAGE_ELEMENT + + "." + METADATA_IIIF_WIDTH_QUALIFIER; + // metadata used to set the height of the canvas that has not an explicit name + public static final String METADATA_IMAGE_HEIGHT = METADATA_IIIF_SCHEMA + "." + METADATA_IIIF_IMAGE_ELEMENT + + "." + METADATA_IIIF_HEIGHT_QUALIFIER; + + // string used in the metadata toc as separator among the different levels + public static final String TOC_SEPARATOR = "|||"; + // convenient constant to split a toc in its components + public static final String TOC_SEPARATOR_REGEX = "\\|\\|\\|"; + + // get module subclass. + protected SimpleModule iiifModule = ObjectMapperFactory.getIiifModule(); + // Use the object mapper subclass. 
+ protected ObjectMapper mapper = ObjectMapperFactory.getIiifObjectMapper(); + + @Autowired + protected BitstreamService bitstreamService; + + @Autowired + ConfigurationService configurationService; + + @Autowired + IIIFApiQueryService iiifApiQueryService; + + + public List getIIIFBundles(Item item) { + return IIIFSharedUtils.getIIIFBundles(item); + } + + public boolean isIIIFEnabled(Item item) { + return IIIFSharedUtils.isIIIFEnabled(item); + } + + /** + * Return all the bitstreams in the item to be used as IIIF resources + * + * @param context the DSpace Context + * @param item the DSpace item + * @return a not null list of bitstreams to use as IIIF resources in the + * manifest + */ + public List getIIIFBitstreams(Context context, Item item) { + List bitstreams = new ArrayList(); + for (Bundle bnd : IIIFSharedUtils.getIIIFBundles(item)) { + bitstreams + .addAll(getIIIFBitstreams(context, bnd)); + } + return bitstreams; + } + + /** + * Return all the bitstreams in the bundle to be used as IIIF resources + * + * @param context the DSpace Context + * @param bundle the DSpace Bundle + * @return a not null list of bitstreams to use as IIIF resources in the + * manifest + */ + public List getIIIFBitstreams(Context context, Bundle bundle) { + return bundle.getBitstreams().stream().filter(b -> isIIIFBitstream(context, b)) + .collect(Collectors.toList()); + } + + /** + * Utility method to check is a bitstream can be used as IIIF resources + * + * @param b the DSpace bitstream to check + * @return true if the bitstream can be used as IIIF resource + */ + public boolean isIIIFBitstream(Context context, Bitstream b) { + return checkImageMimeType(getBitstreamMimeType(b, context)) && b.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_ENABLED)) + .noneMatch(m -> m.getValue().equalsIgnoreCase("false") || m.getValue().equalsIgnoreCase("no")); + } + + /** + * Returns the bitstream mime type + * + * @param bitstream DSpace 
bitstream + * @param context DSpace context + * @return mime type + */ + public String getBitstreamMimeType(Bitstream bitstream, Context context) { + try { + BitstreamFormat bitstreamFormat = bitstream.getFormat(context); + return bitstreamFormat.getMIMEType(); + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + return null; + } + + /** + * Checks to see if the item is searchable. Based on the + * {@link #METADATA_IIIF_SEARCH_ENABLED} metadata. + * + * @param item DSpace item + * @return true if the iiif search is enabled + */ + public boolean isSearchable(Item item) { + return item.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals("iiif.search.enabled")) + .anyMatch(m -> m.getValue().equalsIgnoreCase("true") || + m.getValue().equalsIgnoreCase("yes")); + } + + /** + * Retrives a bitstream based on its position in the IIIF bundle. + * + * @param context DSpace Context + * @param item DSpace Item + * @param canvasPosition bitstream position + * @return bitstream or null if the specified canvasPosition doesn't exist in + * the manifest + */ + public Bitstream getBitstreamForCanvas(Context context, Item item, int canvasPosition) { + List bitstreams = getIIIFBitstreams(context, item); + return bitstreams.size() > canvasPosition ? bitstreams.get(canvasPosition) : null; + } + + /** + * Extracts canvas position from the URL input path. + * @param canvasId e.g. "c12" + * @return the position, e.g. 12 + */ + public int getCanvasId(String canvasId) { + return Integer.parseInt(canvasId.substring(1)); + } + + /** + * Returns the canvas path with position. The path + * returned is partial, not the fully qualified URI. + * @param position position of the bitstream in the DSpace bundle. + * @return partial canvas path. + */ + public String getCanvasId(int position) { + return CANVAS_PATH_BASE + position; + } + + /** + * Serializes the json response. 
+ * @param resource to be serialized + * @return + */ + public String asJson(Resource resource) { + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + mapper.registerModule(iiifModule); + try { + return mapper.writeValueAsString(resource); + } catch (JsonProcessingException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Tests for image mimetype. Presentation API 2.1.1 canvas supports images only. + * Other media types introduced in version 3. + * @param mimetype + * @return true if an image + */ + private boolean checkImageMimeType(String mimetype) { + if (mimetype != null && mimetype.contains("image/")) { + return true; + } + return false; + } + + /** + * Return all the bitstreams in the item to be used as annotations + * + * @param item the DSpace item + * @return a not null list of bitstreams to use as IIIF resources in the + * manifest + */ + public List getSeeAlsoBitstreams(Item item) { + List seeAlsoBitstreams = new ArrayList<>(); + List bundles = item.getBundles(OTHER_CONTENT_BUNDLE); + if (bundles.size() > 0) { + for (Bundle bundle : bundles) { + List bitstreams = bundle.getBitstreams(); + seeAlsoBitstreams.addAll(bitstreams); + } + } + return seeAlsoBitstreams; + } + + /** + * Return the custom iiif label for the resource or the provided default if none + * + * @param dso the dspace object to use as iiif resource + * @param defaultLabel the default label to return if none is specified in the + * metadata + * @return the iiif label for the dspace object + */ + public String getIIIFLabel(DSpaceObject dso, String defaultLabel) { + return dso.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_LABEL)) + .findFirst().map(m -> m.getValue()).orElse(defaultLabel); + } + + /** + * Return the custom iiif description for the resource or the provided default if none + * + * @param dso the dspace object to use as iiif resource + * @param defaultDescription the default description 
to return if none is specified in the + * metadata + * @return the iiif label for the dspace object + */ + public String getIIIFDescription(DSpaceObject dso, String defaultDescription) { + return dso.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_DESCRIPTION)) + .findFirst().map(m -> m.getValue()).orElse(defaultDescription); + } + + /** + * Return the table of contents (toc) positions in the iiif structure where the + * resource appears. Please note that the same resource can belong to multiple + * ranges (i.e. a page that contains the last paragraph of a section and start + * the new section) + * + * @param bitstream the dspace bitstream + * @param prefix a string to add to all the returned toc inherited from the + * parent dspace object + * @return the iiif tocs for the dspace object + */ + public List getIIIFToCs(Bitstream bitstream, String prefix) { + List tocs = bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_TOC)) + .map(m -> StringUtils.isNotBlank(prefix) ? prefix + TOC_SEPARATOR + m.getValue() : m.getValue()) + .collect(Collectors.toList()); + if (tocs.size() == 0 && StringUtils.isNotBlank(prefix)) { + return List.of(prefix); + } else { + return tocs; + } + } + + /** + * Retrieves image dimensions from the image server (IIIF Image API v.2.1.1). + * @param bitstream the bitstream DSO + * @return image dimensions + */ + @Cacheable(key = "#bitstream.getID().toString()", cacheNames = "canvasdimensions") + public int[] getImageDimensions(Bitstream bitstream) { + return iiifApiQueryService.getImageDimensions(bitstream); + } + + /** + * Test to see if the bitstream contains iiif image width metadata. 
+ * @param bitstream the bitstream DSo + * @return true if width metadata was found + */ + public boolean hasWidthMetadata(Bitstream bitstream) { + return bitstream.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals("iiif.image.width")) + .findFirst().map(m -> m != null).orElse(false); + + } + + /** + * Return the iiif toc for the specified bundle + * + * @param bundle the dspace bundle + * @return the iiif toc for the specified bundle + */ + public String getBundleIIIFToC(Bundle bundle) { + String label = bundle.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_LABEL)) + .findFirst().map(m -> m.getValue()).orElse(getToCBundleLabel(bundle)); + return bundle.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_TOC)) + .findFirst().map(m -> m.getValue() + TOC_SEPARATOR + label).orElse(label); + } + + /** + * Excludes bundles found in the iiif.exclude.toc.bundle list + * + * @param bundle the dspace bundle + * @return bundle name or null if bundle is excluded + */ + private String getToCBundleLabel(Bundle bundle) { + String[] iiifAlternate = configurationService.getArrayProperty("iiif.exclude.toc.bundle"); + if (Arrays.stream(iiifAlternate).anyMatch(x -> x.contentEquals(bundle.getName()))) { + return null; + } + return bundle.getName(); + } + + /** + * Return the iiif viewing hint for the item + * + * @param item the dspace item + * @param defaultHint the default hint to apply if nothing else is defined at + * the item leve + * @return the iiif viewing hint for the item + */ + public String getIIIFViewingHint(Item item, String defaultHint) { + return item.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_VIEWING_HINT)) + .findFirst().map(m -> m.getValue()).orElse(defaultHint); + } + + /** + * Return the width for the canvas associated with the bitstream. 
If the + * bitstream doesn't provide directly the information it is retrieved from the + * bundle, item or default. + * + * @param bitstream the dspace bitstream used in the canvas + * @param bundle the bundle the bitstream belong to + * @param item the item the bitstream belong to + * @param defaultWidth the default width to apply if no other preferences are + * found + * @return the width in pixel for the canvas associated with the bitstream + */ + public int getCanvasWidth(Bitstream bitstream, Bundle bundle, Item item, int defaultWidth) { + return getSizeFromMetadata(bitstream, METADATA_IMAGE_WIDTH, + getSizeFromMetadata(bundle, METADATA_IMAGE_WIDTH, + getSizeFromMetadata(item, METADATA_IMAGE_WIDTH, defaultWidth))); + } + + /** + * Return the height for the canvas associated with the bitstream. If the + * bitstream doesn't provide directly the information it is retrieved from the + * bundle, item or default. + * + * @param bitstream the dspace bitstream used in the canvas + * @param bundle the bundle the bitstream belong to + * @param item the item the bitstream belong to + * @param defaultHeight the default width to apply if no other preferences are + * found + * @return the height in pixel for the canvas associated with the bitstream + */ + public int getCanvasHeight(Bitstream bitstream, Bundle bundle, Item item, int defaultHeight) { + return getSizeFromMetadata(bitstream, METADATA_IMAGE_HEIGHT, + getSizeFromMetadata(bundle, METADATA_IMAGE_HEIGHT, + getSizeFromMetadata(item, METADATA_IMAGE_HEIGHT, defaultHeight))); + } + + /** + * Utility method to extract an integer from metadata value. The defaultValue is + * returned if there are not values for the specified metadata or the value is + * not a valid integer. 
Only the first metadata value if any is used + * + * @param dso the dspace object + * @param metadata the metadata key (schema.element[.qualifier] + * @param defaultValue default to return if the metadata value is not an integer + * @return an integer from metadata value + */ + private int getSizeFromMetadata(DSpaceObject dso, String metadata, int defaultValue) { + return dso.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(metadata)) + .findFirst().map(m -> castToInt(m, defaultValue)) + .orElse(defaultValue); + } + + /** + * Utility method to cast a metadata value to int. The defaultInt is returned if + * the metadata value is not a valid integer + * + * @param m the metadata value + * @param defaultInt default to return if the metadata value is not an + * integer + * @return an int corresponding to the metadata value + */ + private int castToInt(MetadataValue m, int defaultInt) { + try { + return Integer.parseInt(m.getValue()); + } catch (NumberFormatException e) { + log.error("Error parsing " + m.getMetadataField().toString('.') + " of " + m.getDSpaceObject().getID() + + " the value " + m.getValue() + " is not an integer. 
Returning the default."); + } + return defaultInt; + } + + /** + * Return the prefix to use to generate canvas name for canvas that has no an + * explicit IIIF label + * + * @param item the DSpace Item + * @param defaultNaming a default to return if the item has not a custom value + * @return the prefix to use to generate canvas name for canvas that has no an + * explicit IIIF label + */ + public String getCanvasNaming(Item item, String defaultNaming) { + return item.getMetadata().stream() + .filter(m -> m.getMetadataField().toString('.').contentEquals(METADATA_IIIF_CANVAS_NAMING)) + .findFirst().map(m -> m.getValue()).orElse(defaultNaming); + } + +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/ImageProfileUtil.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/ImageProfileUtil.java new file mode 100644 index 000000000000..376e3076525a --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/ImageProfileUtil.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import org.dspace.app.iiif.model.generator.ProfileGenerator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class ImageProfileUtil { + + @Autowired + ProfileGenerator profile; + + /** + * Utility method for obtaining the image service profile. 
+ * + * @return image service profile + */ + public ProfileGenerator getImageProfile() throws + RuntimeException { + profile.setIdentifier("http://iiif.io/api/image/2/level1.json"); + return profile; + } +} diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/ThumbProfileUtil.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/ThumbProfileUtil.java new file mode 100644 index 000000000000..3652ab326fb7 --- /dev/null +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/utils/ThumbProfileUtil.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.iiif.service.utils; + +import org.dspace.app.iiif.model.generator.ProfileGenerator; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class ThumbProfileUtil { + + @Autowired + ProfileGenerator profile; + + /** + * Utility method for obtaining the thumbnail image service profile. + * Calling from this utility provides a unique instance of the + * autowired property. Necessary because a single canvas resource contains + * both thumbnail and images. + * + * @return the thumbnail service profile + */ + public ProfileGenerator getThumbnailProfile() throws + RuntimeException { + profile.setIdentifier("http://iiif.io/api/image/2/level0.json"); + return profile; + } + +} diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 7a6336ee4666..db8b55c79b5d 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -8,14 +8,14 @@ dspace-parent org.dspace - 7.0 + 8.0-SNAPSHOT .. ${basedir}/.. 
- 3.3.0 + 3.4.0 5.87.0.RELEASE @@ -35,24 +35,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - commons-cli @@ -73,45 +55,10 @@ xoai ${xoai.version} + - org.hamcrest - hamcrest-all - - - - org.mockito - mockito-all - - - xml-apis - xml-apis - - - org.apache.commons - commons-lang3 - - - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - - org.codehaus.woodstox - wstx-asl - - - - org.dom4j - dom4j - - - - com.lyncode - test-support + com.fasterxml.woodstox + woodstox-core @@ -209,7 +156,7 @@ org.hamcrest - hamcrest-all + hamcrest compile diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java index 700105899a4a..4f842b8e944c 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/XOAI.java @@ -8,6 +8,10 @@ package org.dspace.xoai.app; import static com.lyncode.xoai.dataprovider.core.Granularity.Second; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang.StringUtils.EMPTY; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; import static org.dspace.xoai.util.ItemUtils.retrieveMetadata; import java.io.ByteArrayOutputStream; @@ -38,6 +42,8 @@ import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.ORDER; import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; import org.dspace.authorize.ResourcePolicy; @@ -77,8 +83,8 @@ public class XOAI { private static Logger log = LogManager.getLogger(XOAI.class); + // needed because the solr query only returns 10 rows by default private final Context context; - private boolean optimize; 
private final boolean verbose; private boolean clean; @@ -94,8 +100,8 @@ public class XOAI { private final AuthorizeService authorizeService; private final ItemService itemService; - private final static ConfigurationService configurationService = DSpaceServicesFactory - .getInstance().getConfigurationService(); + private final static ConfigurationService configurationService = DSpaceServicesFactory.getInstance() + .getConfigurationService(); private List extensionPlugins; @@ -115,9 +121,8 @@ private List getFileFormats(Item item) { return formats; } - public XOAI(Context context, boolean optimize, boolean clean, boolean verbose) { + public XOAI(Context context, boolean clean, boolean verbose) { this.context = context; - this.optimize = optimize; this.clean = clean; this.verbose = verbose; @@ -152,9 +157,8 @@ public int index() throws DSpaceSolrIndexerException { System.out.println("Using full import."); result = this.indexAll(); } else { - SolrQuery solrParams = new SolrQuery("*:*") - .addField("item.lastmodified") - .addSort("item.lastmodified", ORDER.desc).setRows(1); + SolrQuery solrParams = new SolrQuery("*:*").addField("item.lastmodified") + .addSort("item.lastmodified", ORDER.desc).setRows(1); SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams); if (results.getNumFound() == 0) { @@ -167,13 +171,6 @@ public int index() throws DSpaceSolrIndexerException { } solrServerResolver.getServer().commit(); - - if (optimize) { - println("Optimizing Index"); - solrServerResolver.getServer().optimize(); - println("Index optimized"); - } - // Set last compilation date xoaiLastCompilationCacheService.put(new Date()); return result; @@ -183,12 +180,10 @@ public int index() throws DSpaceSolrIndexerException { } private int index(Date last) throws DSpaceSolrIndexerException, IOException { - System.out - .println("Incremental import. 
Searching for documents modified after: " - + last.toString()); + System.out.println("Incremental import. Searching for documents modified after: " + last.toString()); /* - * Index all changed or new items or items whose visibility is viable to - * change due to an embargo. + * Index all changed or new items or items whose visibility is viable to change + * due to an embargo. */ try { Iterator discoverableChangedItems = itemService @@ -204,31 +199,55 @@ private int index(Date last) throws DSpaceSolrIndexerException, IOException { } /** - * Get all items already in the index which are viable to change visibility - * due to an embargo. Only consider those which haven't been modified - * anyways since the last update, so they aren't updated twice in one import - * run. + * Get all items already in the index which are viable to change visibility due + * to an embargo. Only consider those which haven't been modified anyways since + * the last update, so they aren't updated twice in one import run. * - * @param last - * maximum date for an item to be considered for an update - * @return Iterator over list of items which might have changed their - * visibility since the last update. + * @param last maximum date for an item to be considered for an update + * @return Iterator over list of items which might have changed their visibility + * since the last update. 
* @throws DSpaceSolrIndexerException */ private Iterator getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException { try { - SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id"); - SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params); + SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id").setRows(100) + .addSort("item.handle", SolrQuery.ORDER.asc); + SolrClient solrClient = solrServerResolver.getServer(); + List items = new LinkedList<>(); - for (int i = 0; i < documents.getNumFound(); i++) { - Item item = itemService.find(context, - UUID.fromString((String) documents.get(i).getFieldValue("item.id"))); - if (item.getLastModified().before(last)) { - items.add(item); + boolean done = false; + /* + * Using solr cursors to paginate and prevent the query from returning 10 + * SolrDocument objects only. + */ + String cursorMark = CURSOR_MARK_START; + String nextCursorMark = EMPTY; + + while (!done) { + params.set(CURSOR_MARK_PARAM, cursorMark); + QueryResponse response = solrClient.query(params); + nextCursorMark = response.getNextCursorMark(); + + for (SolrDocument document : response.getResults()) { + Item item = itemService.find(context, UUID.fromString((String) document.getFieldValue("item.id"))); + if (nonNull(item)) { + if (nonNull(item.getLastModified())) { + if (item.getLastModified().before(last)) { + items.add(item); + } + } else { + log.warn("Skipping item with id " + item.getID()); + } + } + } + + if (cursorMark.equals(nextCursorMark)) { + done = true; } + cursorMark = nextCursorMark; } return items.iterator(); - } catch (SolrServerException | SQLException | DSpaceSolrException ex) { + } catch (SolrServerException | SQLException ex) { throw new DSpaceSolrIndexerException(ex.getMessage(), ex); } } @@ -250,11 +269,10 @@ private int indexAll() throws DSpaceSolrIndexerException { } /** - * Check if an item is already indexed. 
Using this, it is possible to check - * if withdrawn or nondiscoverable items have to be indexed at all. + * Check if an item is already indexed. Using this, it is possible to check if + * withdrawn or nondiscoverable items have to be indexed at all. * - * @param item - * Item that should be checked for its presence in the index. + * @param item Item that should be checked for its presence in the index. * @return has it been indexed? */ private boolean checkIfIndexed(Item item) throws IOException { @@ -266,11 +284,11 @@ private boolean checkIfIndexed(Item item) throws IOException { return false; } } - /** + + /** * Check if an item is flagged visible in the index. * - * @param item - * Item that should be checked for its presence in the index. + * @param item Item that should be checked for its presence in the index. * @return has it been indexed? */ private boolean checkIfVisibleInOAI(Item item) throws IOException { @@ -287,8 +305,7 @@ private boolean checkIfVisibleInOAI(Item item) throws IOException { } } - private int index(Iterator iterator) - throws DSpaceSolrIndexerException { + private int index(Iterator iterator) throws DSpaceSolrIndexerException { try { int i = 0; int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000); @@ -302,7 +319,7 @@ private int index(Iterator iterator) } else { list.add(this.index(item)); } - //Uncache the item to keep memory consumption low + // Uncache the item to keep memory consumption low context.uncacheEntity(item); } catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) { @@ -334,12 +351,11 @@ private int index(Iterator iterator) } /** - * Method to get the most recent date on which the item changed concerning - * the OAI deleted status (policy start and end dates for all anonymous READ + * Method to get the most recent date on which the item changed concerning the + * OAI deleted status (policy start and end dates for all anonymous READ * policies and the standard last 
modification date) * - * @param item - * Item + * @param item Item * @return date * @throws SQLException */ @@ -382,17 +398,16 @@ private SolrInputDocument index(Item item) boolean isIndexed = this.checkIfIndexed(item); /* - * If the item is not under embargo, it should be visible. If it is, - * make it invisible if this is the first time it is indexed. For - * subsequent index runs, keep the current status, so that if the item - * is embargoed again, it is flagged as deleted instead and does not - * just disappear, or if it is still under embargo, it won't become - * visible and be known to harvesters as deleted before it gets - * disseminated for the first time. The item has to be indexed directly - * after publication even if it is still embargoed, because its - * lastModified date will not change when the embargo end date (or start - * date) is reached. To circumvent this, an item which will change its - * status in the future will be marked as such. + * If the item is not under embargo, it should be visible. If it is, make it + * invisible if this is the first time it is indexed. For subsequent index runs, + * keep the current status, so that if the item is embargoed again, it is + * flagged as deleted instead and does not just disappear, or if it is still + * under embargo, it won't become visible and be known to harvesters as deleted + * before it gets disseminated for the first time. The item has to be indexed + * directly after publication even if it is still embargoed, because its + * lastModified date will not change when the embargo end date (or start date) + * is reached. To circumvent this, an item which will change its status in the + * future will be marked as such. */ boolean isPublic = isEmbargoed ? (isIndexed ? 
isCurrentlyVisible : false) : true; @@ -404,33 +419,31 @@ private SolrInputDocument index(Item item) doc.addField("item.willChangeStatus", willChangeStatus(item)); /* - * Mark an item as deleted not only if it is withdrawn, but also if it - * is made private, because items should not simply disappear from OAI - * with a transient deletion policy. Do not set the flag for still - * invisible embargoed items, because this will override the item.public - * flag. + * Mark an item as deleted not only if it is withdrawn, but also if it is made + * private, because items should not simply disappear from OAI with a transient + * deletion policy. Do not set the flag for still invisible embargoed items, + * because this will override the item.public flag. */ doc.addField("item.deleted", (item.isWithdrawn() || !item.isDiscoverable() || (isEmbargoed ? isPublic : false))); /* - * An item that is embargoed will potentially not be harvested by - * incremental harvesters if the from and until params do not encompass - * both the standard lastModified date and the anonymous-READ resource - * policy start date. The same is true for the end date, where - * harvesters might not get a tombstone record. Therefore, consider all - * relevant policy dates and the standard lastModified date and take the - * most recent of those which have already passed. + * An item that is embargoed will potentially not be harvested by incremental + * harvesters if the from and until params do not encompass both the standard + * lastModified date and the anonymous-READ resource policy start date. The same + * is true for the end date, where harvesters might not get a tombstone record. + * Therefore, consider all relevant policy dates and the standard lastModified + * date and take the most recent of those which have already passed. 
*/ - doc.addField("item.lastmodified", SolrUtils.getDateFormatter() - .format(this.getMostRecentModificationDate(item))); + doc.addField("item.lastmodified", + SolrUtils.getDateFormatter().format(this.getMostRecentModificationDate(item))); if (item.getSubmitter() != null) { doc.addField("item.submitter", item.getSubmitter().getEmail()); } - for (Collection col: item.getCollections()) { + for (Collection col : item.getCollections()) { doc.addField("item.collections", "col_" + col.getHandle().replace("/", "_")); } for (Community com : collectionsService.flatParentCommunities(context, item)) { @@ -457,8 +470,7 @@ private SolrInputDocument index(Item item) // Message output before processing - for debugging purposes if (verbose) { - println(String.format("Item %s with handle %s is about to be indexed", - item.getID().toString(), handle)); + println(String.format("Item %s with handle %s is about to be indexed", item.getID().toString(), handle)); } ByteArrayOutputStream out = new ByteArrayOutputStream(); @@ -476,8 +488,7 @@ private SolrInputDocument index(Item item) doc.addField("item.compile", out.toString()); if (verbose) { - println(String.format("Item %s with handle %s indexed", - item.getID().toString(), handle)); + println(String.format("Item %s with handle %s indexed", item.getID().toString(), handle)); } return doc; @@ -510,12 +521,10 @@ private boolean isPublic(Item item) { return pub; } - private static boolean getKnownExplanation(Throwable t) { if (t instanceof ConnectException) { - System.err.println("Solr server (" - + configurationService.getProperty("oai.solr.url", "") - + ") is down, turn it on."); + System.err.println( + "Solr server (" + configurationService.getProperty("oai.solr.url", "") + ") is down, turn it on."); return true; } @@ -544,7 +553,7 @@ private void clearIndex() throws DSpaceSolrIndexerException { } private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICacheService xoaiCacheService) - throws IOException { + throws 
IOException { System.out.println("Purging cached OAI responses."); xoaiItemCacheService.deleteAll(); xoaiCacheService.deleteAll(); @@ -557,10 +566,8 @@ private static void cleanCache(XOAIItemCacheService xoaiItemCacheService, XOAICa public static void main(String[] argv) throws IOException, ConfigurationException { - - AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext(new Class[] { - BasicConfiguration.class - }); + AnnotationConfigApplicationContext applicationContext = new AnnotationConfigApplicationContext( + new Class[] { BasicConfiguration.class }); XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class); XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class); @@ -571,21 +578,18 @@ public static void main(String[] argv) throws IOException, ConfigurationExceptio CommandLineParser parser = new DefaultParser(); Options options = new Options(); options.addOption("c", "clear", false, "Clear index before indexing"); - options.addOption("o", "optimize", false, - "Optimize index at the end"); options.addOption("v", "verbose", false, "Verbose output"); options.addOption("h", "help", false, "Shows some help"); options.addOption("n", "number", true, "FOR DEVELOPMENT MUST DELETE"); CommandLine line = parser.parse(options, argv); - String[] validSolrCommands = {COMMAND_IMPORT, COMMAND_CLEAN_CACHE}; - String[] validDatabaseCommands = {COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, COMMAND_ERASE_COMPILED_ITEMS}; - + String[] validSolrCommands = { COMMAND_IMPORT, COMMAND_CLEAN_CACHE }; + String[] validDatabaseCommands = { COMMAND_CLEAN_CACHE, COMMAND_COMPILE_ITEMS, + COMMAND_ERASE_COMPILED_ITEMS }; boolean solr = true; // Assuming solr by default solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); - boolean run = false; if (line.getArgs().length > 0) { if (solr) { @@ -607,10 +611,7 @@ public static void main(String[] argv) throws 
IOException, ConfigurationExceptio if (COMMAND_IMPORT.equals(command)) { ctx = new Context(Context.Mode.READ_ONLY); - XOAI indexer = new XOAI(ctx, - line.hasOption('o'), - line.hasOption('c'), - line.hasOption('v')); + XOAI indexer = new XOAI(ctx, line.hasOption('c'), line.hasOption('v')); applicationContext.getAutowireCapableBeanFactory().autowireBean(indexer); @@ -635,8 +636,7 @@ public static void main(String[] argv) throws IOException, ConfigurationExceptio } System.out.println("OAI 2.0 manager action ended. It took " - + ((System.currentTimeMillis() - start) / 1000) - + " seconds."); + + ((System.currentTimeMillis() - start) / 1000) + " seconds."); } else { usage(); } @@ -688,7 +688,7 @@ private void compile() throws CompilingException { private static void usage() { boolean solr = true; // Assuming solr by default - solr = !("database").equals(configurationService.getProperty("oai.storage","solr")); + solr = !("database").equals(configurationService.getProperty("oai.storage", "solr")); if (solr) { System.out.println("OAI Manager Script"); @@ -697,7 +697,6 @@ private static void usage() { System.out.println(" " + COMMAND_IMPORT + " - To import DSpace items into OAI index and cache system"); System.out.println(" " + COMMAND_CLEAN_CACHE + " - Cleans the OAI cached responses"); System.out.println("> Parameters:"); - System.out.println(" -o Optimize index after indexing (" + COMMAND_IMPORT + " only)"); System.out.println(" -c Clear index (" + COMMAND_IMPORT + " only)"); System.out.println(" -v Verbose output"); System.out.println(" -h Shows this text"); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java new file mode 100644 index 000000000000..3201a0229178 --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -0,0 +1,82 @@ +/** + * The contents of 
this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.app.plugins; + +import java.sql.SQLException; +import java.util.List; + +import com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; +import org.apache.commons.lang3.StringUtils; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin; +import org.dspace.xoai.util.ItemUtils; + +/** + * AccessStatusElementItemCompilePlugin aims to add structured information about the + * Access Status of the item (if any). + + * The xoai document will be enriched with a structure like that + *
    + * {@code
    + *   
    + *       
    + *          open.access
    + *       
    + *   
    + *   OR
    + *   
    + *       
    + *          embargo
    + *          2024-10-10
    + *       
    + *   
    + * }
    + * 
    + * Returning Values are based on: + * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper + */ +public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Override + public Metadata additionalMetadata(Context context, Metadata metadata, Item item) { + AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + try { + String accessStatusType; + accessStatusType = accessStatusService.getAccessStatus(context, item); + + String embargoFromItem = accessStatusService.getEmbargoFromItem(context, item); + + Element accessStatus = ItemUtils.create("access-status"); + accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + + if (StringUtils.isNotEmpty(embargoFromItem)) { + accessStatus.getField().add(ItemUtils.createValue("embargo", embargoFromItem)); + } + + Element others; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + others = ItemUtils.getElement(elements, "others"); + } else { + others = ItemUtils.create("others"); + } + others.getElement().add(accessStatus); + + } catch (SQLException e) { + e.printStackTrace(); + } + + return metadata; + } + +} diff --git a/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java b/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java index 212f1e34064c..379f2fa18134 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java @@ -72,7 +72,12 @@ public class DSpaceOAIDataProvider { private DSpaceResumptionTokenFormatter resumptionTokenFormat = new DSpaceResumptionTokenFormatter(); - @RequestMapping({"", "/"}) + @RequestMapping("") + public void index(HttpServletResponse response, HttpServletRequest request) throws IOException { + 
response.sendRedirect(request.getRequestURI() + "/"); + } + + @RequestMapping({"/"}) public String indexAction(HttpServletResponse response, Model model) throws ServletException { try { XOAIManager manager = xoaiManagerResolver.getManager(); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java index 26dd976495e6..83c4486f7134 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/resources/DSpaceResourceResolver.java @@ -12,7 +12,7 @@ import java.io.IOException; import java.io.InputStream; import javax.xml.transform.Source; -import javax.xml.transform.Transformer; +import javax.xml.transform.Templates; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamSource; @@ -22,6 +22,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; public class DSpaceResourceResolver implements ResourceResolver { + // Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions private static final TransformerFactory transformerFactory = TransformerFactory .newInstance("net.sf.saxon.TransformerFactoryImpl", null); @@ -39,8 +40,7 @@ public InputStream getResource(String path) throws IOException { } @Override - public Transformer getTransformer(String path) throws IOException, - TransformerConfigurationException { + public Templates getTemplates(String path) throws IOException, TransformerConfigurationException { // construct a Source that reads from an InputStream Source mySrc = new StreamSource(getResource(path)); // specify a system ID (the path to the XSLT-file on the filesystem) @@ -48,6 +48,6 @@ public Transformer getTransformer(String path) throws IOException, // XSLT-files (like ) String systemId = basePath + "/" + path; 
mySrc.setSystemId(systemId); - return transformerFactory.newTransformer(mySrc); + return transformerFactory.newTemplates(mySrc); } } diff --git a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/solr/DSpaceSolrServerResolver.java b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/solr/DSpaceSolrServerResolver.java index c544ec1659a8..5bb501885d5d 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/services/impl/solr/DSpaceSolrServerResolver.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/services/impl/solr/DSpaceSolrServerResolver.java @@ -12,6 +12,8 @@ import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.dspace.service.impl.HttpConnectionPoolService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.xoai.services.api.config.ConfigurationService; import org.dspace.xoai.services.api.solr.SolrServerResolver; import org.springframework.beans.factory.annotation.Autowired; @@ -27,8 +29,15 @@ public class DSpaceSolrServerResolver implements SolrServerResolver { public SolrClient getServer() throws SolrServerException { if (server == null) { String serverUrl = configurationService.getProperty("oai.solr.url"); + HttpConnectionPoolService httpConnectionPoolService + = DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName("solrHttpConnectionPoolService", + HttpConnectionPoolService.class); try { - server = new HttpSolrClient.Builder(serverUrl).build(); + server = new HttpSolrClient.Builder(serverUrl) + .withHttpClient(httpConnectionPoolService.getClient()) + .build(); log.debug("OAI Solr Server Initialized"); } catch (Exception e) { log.error("Could not initialize OAI Solr Server at " + serverUrl , e); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/solr/DSpaceSolrServer.java b/dspace-oai/src/main/java/org/dspace/xoai/solr/DSpaceSolrServer.java index 158f73be1dcf..bf6b46807bf3 
100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/solr/DSpaceSolrServer.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/solr/DSpaceSolrServer.java @@ -13,6 +13,7 @@ import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.dspace.service.impl.HttpConnectionPoolService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; @@ -33,9 +34,16 @@ public static SolrClient getServer() throws SolrServerException { if (_server == null) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + HttpConnectionPoolService httpConnectionPoolService + = DSpaceServicesFactory.getInstance() + .getServiceManager() + .getServiceByName("solrHttpConnectionPoolService", + HttpConnectionPoolService.class); String serverUrl = configurationService.getProperty("oai.solr.url"); try { - _server = new HttpSolrClient.Builder(serverUrl).build(); + _server = new HttpSolrClient.Builder(serverUrl) + .withHttpClient(httpConnectionPoolService.getClient()) + .build(); log.debug("OAI Solr Server Initialized"); } catch (Exception e) { log.error("Could not initialize OAI Solr Server at " + serverUrl , e); diff --git a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java index 955c3a78c392..938cf0d64a5b 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/util/ItemUtils.java @@ -21,6 +21,8 @@ import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.MetadataExposureService; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import 
org.dspace.content.Item; @@ -59,6 +61,10 @@ public class ItemUtils { private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); + + private static final AuthorizeService authorizeService + = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + /** * Default constructor */ @@ -103,6 +109,11 @@ private static Element createBundlesElement(Context context, Item item) throws S bundle.getElement().add(bitstreams); List bits = b.getBitstreams(); for (Bitstream bit : bits) { + // Check if bitstream is null and log the error + if (bit == null) { + log.error("Null bitstream found, check item uuid: " + item.getID()); + break; + } Element bitstream = create("bitstream"); bitstreams.getElement().add(bitstream); String url = ""; @@ -158,13 +169,17 @@ private static Element createLicenseElement(Context context, Item item) List licBits = licBundle.getBitstreams(); if (!licBits.isEmpty()) { Bitstream licBit = licBits.get(0); - InputStream in; - - in = bitstreamService.retrieve(context, licBit); - ByteArrayOutputStream out = new ByteArrayOutputStream(); - Utils.bufferedCopy(in, out); - license.getField().add(createValue("bin", Base64Utils.encode(out.toString()))); - + if (authorizeService.authorizeActionBoolean(context, licBit, Constants.READ)) { + InputStream in; + + in = bitstreamService.retrieve(context, licBit); + ByteArrayOutputStream out = new ByteArrayOutputStream(); + Utils.bufferedCopy(in, out); + license.getField().add(createValue("bin", Base64Utils.encode(out.toString()))); + } else { + log.info("Missing READ rights for license bitstream. 
Did not include license bitstream for item: " + + item.getID() + "."); + } } } return license; diff --git a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java index de76c992458c..0f48824159c2 100644 --- a/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java +++ b/dspace-oai/src/test/java/org/dspace/xoai/tests/integration/xoai/PipelineTest.java @@ -29,7 +29,7 @@ public void pipelineTest() throws Exception { InputStream input = PipelineTest.class.getClassLoader().getResourceAsStream("item.xml"); InputStream xslt = PipelineTest.class.getClassLoader().getResourceAsStream("oai_dc.xsl"); String output = FileUtils.readAllText(new XSLPipeline(input, true) - .apply(factory.newTransformer(new StreamSource(xslt))) + .apply(factory.newTemplates(new StreamSource(xslt))) .getTransformed()); assertThat(output, oai_dc().withXPath("/oai_dc:dc/dc:title", equalTo("Teste"))); diff --git a/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java b/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java index 6fab56b52623..42dbed04b63b 100644 --- a/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java +++ b/dspace-oai/src/test/java/org/dspace/xoai/tests/stylesheets/AbstractXSLTest.java @@ -19,6 +19,7 @@ import org.apache.commons.io.IOUtils; public abstract class AbstractXSLTest { + // Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions private static final TransformerFactory factory = TransformerFactory .newInstance("net.sf.saxon.TransformerFactoryImpl", null); diff --git a/dspace-oai/src/test/resources/item.xml b/dspace-oai/src/test/resources/item.xml index c6eebc4cf7a7..b020a490bf3e 100644 --- a/dspace-oai/src/test/resources/item.xml +++ b/dspace-oai/src/test/resources/item.xml @@ -70,7 +70,7 @@ 10.1.1.13.814.pdf application/pdf 449025 - 
http://localhost:8080/xmlui/bitstream/123456789/3/1/10.1.1.13.814.pdf + http://localhost:8080/bitstream/123456789/3/1/10.1.1.13.814.pdf de2290628b85b17abdf829d121793c77 MD5 1 @@ -85,7 +85,7 @@ license.txt text/plain; charset=utf-8 1748 - http://localhost:8080/xmlui/bitstream/123456789/3/2/license.txt + http://localhost:8080/bitstream/123456789/3/2/license.txt 8a4605be74aa9ea9d79846c1fba20a33 MD5 2 diff --git a/dspace-oai/src/test/resources/xoai-test1.xml b/dspace-oai/src/test/resources/xoai-test1.xml index a621d0518fe2..0053e63e102e 100644 --- a/dspace-oai/src/test/resources/xoai-test1.xml +++ b/dspace-oai/src/test/resources/xoai-test1.xml @@ -115,7 +115,7 @@ text/html 39970 - http://demo.dspace.org/xmlui/bitstream/10673/4/1/Lily-cat-of-day.htm + http://demo.dspace.org/bitstream/10673/4/1/Lily-cat-of-day.htm aae901336b56ae14070fdec8c79dd48e MD5 @@ -125,7 +125,7 @@ cl_style.css text/css 1181 - http://demo.dspace.org/xmlui/bitstream/10673/4/2/cl_style.css + http://demo.dspace.org/bitstream/10673/4/2/cl_style.css 58178d41c221520d88333b9d482b31b8 MD5 @@ -136,7 +136,7 @@ image/jpeg 83390 - http://demo.dspace.org/xmlui/bitstream/10673/4/3/kitty-1257950690.jpg + http://demo.dspace.org/bitstream/10673/4/3/kitty-1257950690.jpg 729596bb3ad09ebe958a150787418212 MD5 @@ -147,7 +147,7 @@ image/jpeg 86057 - http://demo.dspace.org/xmlui/bitstream/10673/4/4/kitty-1257950690_002.jpg + http://demo.dspace.org/bitstream/10673/4/4/kitty-1257950690_002.jpg 37c07c8488042d064bd945765d9e2f98 MD5 @@ -157,7 +157,7 @@ logo.png image/png 4157 - http://demo.dspace.org/xmlui/bitstream/10673/4/5/logo.png + http://demo.dspace.org/bitstream/10673/4/5/logo.png d64fb5f69c7820685d5ed3037b9670ed MD5 5 @@ -166,7 +166,7 @@ style.css text/css 49623 - http://demo.dspace.org/xmlui/bitstream/10673/4/6/style.css + http://demo.dspace.org/bitstream/10673/4/6/style.css 89c2a0557993a8665574c0b52fc1582d MD5 6 @@ -175,7 +175,7 @@ stylesheet.css text/css 877 - 
http://demo.dspace.org/xmlui/bitstream/10673/4/7/stylesheet.css + http://demo.dspace.org/bitstream/10673/4/7/stylesheet.css d2b580ac1c89ae88dc05dcd9108b002e MD5 @@ -191,7 +191,7 @@ license.txt text/plain; charset=utf-8 1748 - http://demo.dspace.org/xmlui/bitstream/10673/4/8/license.txt + http://demo.dspace.org/bitstream/10673/4/8/license.txt 8a4605be74aa9ea9d79846c1fba20a33 MD5 diff --git a/dspace-rdf/pom.xml b/dspace-rdf/pom.xml index 172e7cc79677..f21381eb4ea3 100644 --- a/dspace-rdf/pom.xml +++ b/dspace-rdf/pom.xml @@ -9,7 +9,7 @@ org.dspace dspace-parent - 7.0 + 8.0-SNAPSHOT .. diff --git a/dspace-rest/README.md b/dspace-rest/README.md deleted file mode 100644 index 07d71d66ed67..000000000000 --- a/dspace-rest/README.md +++ /dev/null @@ -1,194 +0,0 @@ -#DSpace REST API (Jersey) - DEPRECATED - -A RESTful web services API for DSpace, built using JAX-RS1 JERSEY. - -_This REST API has been deprecated and will be removed in v8. Please use the Server API (/server) webapp instead._ - -##Getting Started -This REST API is integrated directly into the DSpace codebase. - - * Rebuild as usual: mvn + ant - * Deploy the webapp (i.e to Tomcat) - * `````` - - -REST API can do all CRUD (create, read, update, delete) operations over communities, collections, items, bitstream and bitstream policies. Without logging into the REST API, you have read access as an anonymous user (member of the Anonymous group). If you want to make changes in DSpace using the REST API, you must log into the API using the "login" endpoint and then use the returned token in request header of your subsequent API calls. 
- -##Endpoints - -| Resource |CREATE|READ list|READ single|Edit|Delete|Search| -| ------------- |------|:-------:|-----------|----|------|------| -| /communities | Y | Y | Y | Y | Y | | -| /collections | Y | Y | Y | Y | Y | Y | -| /items | Y | Y | Y | Y | Y | Y | -| /bitstreams | Y | Y | Y | Y | Y | || - -Search in collections is possible only by name and search in items only by metadata field. - -###Index -Get information on how to use the API -- GET http://localhost:8080 - -Test whether the REST API is running and available -- GET http://localhost:8080/rest/test - -Log into REST API -- POST http://localhost:8080/rest/login - -Logout from REST API -- POST http://localhost:8080/rest/logout - -Get status of REST API and the logged-in user -- GET http://localhost:8080/rest/status - - -###Communities -View the list of top-level communities -- GET http://localhost:8080/rest/communities/top-communities - -View the list of all communities -- GET http://localhost:8080/rest/communities[?expand={collections,parentCommunity,subCommunities,logo,all}] - -View a specific community -- GET http://localhost:8080/rest/communities/:ID[?expand={collections,parentCommunity,subCommunities,logo,all}] - -View the list of subcollections in community -- GET http://localhost:8080/rest/communities/:ID/collections[?expand={items,parentCommunityList,license,logo,all}] - -View the list of subcommunities in community -- GET http://localhost:8080/rest/communities/:ID/communities[?expand={collections,parentCommunity,subCommunities,logo,all}] - -Create new top-level community -- POST http://localhost:8080/rest/communities - -Create new subcollection in community -- POST http://localhost:8080/rest/communities/:ID/collections - -Create new subcommunity in community -- POST http://localhost:8080/rest/communities/:ID/communities - -Update community -- PUT http://localhost:8080/rest/communities/:ID - -Delete community -- DELETE http://localhost:8080/rest/communities/:ID - -Delete subcollection in 
community -- DELETE http://localhost:8080/rest/communities/:ID/collections/:ID - -Delete subcommunity in community -- DELETE http://localhost:8080/rest/communities/:ID/communities/:ID - - -###Collections -View the list of collections -- GET http://localhost:8080/rest/collections[?expand={items,parentCommunityList,license,logo,all}] - -View a specific collection -- GET http://localhost:8080/rest/collections/:ID[?expand={items,parentCommunityList,license,logo,all}] - -View items in collection -- GET http://localhost:8080/rest/collections/:ID/items[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}] - -Create item in collection -- POST http://localhost:8080/rest/collections/:ID/items - -Find collection by name -- POST http://localhost:8080/rest/collections/find-collection - -Update collection -- PUT http://localhost:8080/rest/collections/:ID - -Delete collection -- DELETE http://localhost:8080/rest/collections/:ID - -Delete item in collection -- DELETE http://localhost:8080/rest/collections/:ID/items/:ID - - -###Items -View the list of items -- GET http://localhost:8080/rest/items[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}] - -View speciific item -- GET http://localhost:8080/rest/items/:ID[?expand={metadata,parentCollection,parentcollectionList,parentCommunityList,bitstreams,all}] - -View an Item and view its bitstreams -- GET http://localhost:8080/rest/items/:ID/bitstreams[?expand={parent,policies,all}] - -View an Item, and view its metadata -- GET http://localhost:8080/rest/items/:ID/metadata - -Find item by metadata -- POST http://localhost:8080/rest/items/find-by-metadata-field - -Add metadata to item -- POST http://localhost:8080/rest/items/:ID/metadata - -Create bitstream in item -- POST http://localhost:8080/rest/items/:ID/bitstreams - -Update metadata in item -- PUT http://localhost:8080/rest/items/:ID/metadata - -Delete item -- DELETE http://localhost:8080/rest/items/:ID 
- -Delete all metadata in item -- DELETE http://localhost:8080/rest/items/:ID/metadata - -Delete bitstream in item -- DELETE http://localhost:8080/rest/items/:ID/bitstreams/:ID - - -###Bitstreams -View the list of bitstreams -- GET http://localhost:8080/rest/bitstreams[?expand={parent,policies,all}] - -View information about a bitstream -- GET http://localhost:8080/rest/bitstreams/:ID[?expand={parent,policies,all}] - -View/Download a specific Bitstream -- GET http://localhost:8080/rest/bitstreams/:ID/retrieve - -View the list of policies of bitstream -- GET http://localhost:8080/rest/bitstreams/:ID/policy - -Add policy to bitstream -- POST http://localhost:8080/rest/bitstreams/:ID/policy - -Update bitstream -- PUT http://localhost:8080/rest/bitstreams/:ID - -Update data of bitstream -- PUT http://localhost:8080/rest/bitstreams/:ID/data - -Delete bitstream -- DELETE http://localhost:8080/rest/bitstreams/:ID - -Delete policy of bitstream -- DELETE http://localhost:8080/rest/bitstreams/:ID/policy/:ID - - -####Statistics -Recording view events of items and download events of bitstreams (set stats = true in rest.cfg to enable recording of events) -http://localhost:8080/rest/items/:ID?userIP=ip&userAgent=userAgent&xforwardedfor=xforwardedfor -If no parameters are given, the details of the HTTP request sender are used in statistics. -This enables tools like proxies to supply the details of their user rather than themselves. - - -###Handles -Lookup a DSpaceObject by its Handle, this produces the name/ID that you look up in /bitstreams, /items, /collections, /communities -- http://localhost:8080/rest/handle/{prefix}/{suffix} - -##Expand -There is an ?expand= query parameter for more expensive operations. You can add it at the end of the request URL. -It is optional, all, some or none. The response will usually indicate what the available "expand" options are. 
- -##HTTP Responses -* 200 OK - The requested object/objects exists -* 401 Unauthorized - The anonymous user does not have READ access to that object -* 404 Not Found - The specified object doesn't exist -* 405 Method Not Allowed - Wrong request method (GET,POST,PUT,DELETE) or wrong data format (JSON/XML). -* 415 Unsupported Media Type - Missing "Content-Type: application/json" or "Content-Type: application/xml" request header -* 500 Server Error - Likely a SQLException, IOException, more details in the logs. diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml deleted file mode 100644 index bc61fefe7db2..000000000000 --- a/dspace-rest/pom.xml +++ /dev/null @@ -1,263 +0,0 @@ - - 4.0.0 - org.dspace - dspace-rest - war - 7.0 - DSpace (Deprecated) REST Webapp - DSpace RESTful Web Services API. NOTE: this REST API is DEPRECATED. - Please consider using the REST API in the dspace-server-webapp instead! - http://demo.dspace.org - - - org.dspace - dspace-parent - 7.0 - .. - - - - - ${basedir}/.. 
- 5.3.10.RELEASE - - - - - org.apache.maven.plugins - maven-war-plugin - - true - - true - - - - com.mycila - license-maven-plugin - - - - **/static/reports/spin.js - **/static/reports/README.md - **/*.xsd - - - - - - - - - - org.glassfish.jersey.core - jersey-server - ${jersey.version} - - - org.glassfish.jersey.containers - jersey-container-servlet - ${jersey.version} - - - org.glassfish.jersey.media - jersey-media-json-jackson - ${jersey.version} - - - com.fasterxml.jackson.core - jackson-module-jaxb-annotations - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-base - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - - - - - - com.fasterxml.jackson.core - jackson-annotations - ${jackson.version} - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-base - ${jackson.version} - - - com.fasterxml.jackson.jaxrs - jackson-jaxrs-json-provider - ${jackson.version} - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - ${jackson.version} - - - - - org.springframework - spring-core - - - - org.springframework - spring-context - - - - org.springframework - spring-web - - - - - org.glassfish.jersey.ext - jersey-spring5 - ${jersey.version} - - - - org.springframework - spring - - - org.springframework - spring-core - - - org.springframework - spring-web - - - org.springframework - spring-beans - - - org.springframework - spring-context - - - org.springframework - spring-aop - - - - jakarta.annotation - jakarta.annotation-api - - - - org.ow2.asm - asm-commons - - - - - org.springframework.security - spring-security-core - ${spring-security.version} - - - - org.springframework - spring-expression - - - - - org.springframework.security - spring-security-web - ${spring-security.version} - - - - org.springframework - spring-expression - - - - - org.springframework.security - spring-security-config - ${spring-security.version} - - - - cglib - cglib - 2.2.2 - - - - - org.dspace - 
dspace-api - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - com.fasterxml.jackson.core - jackson-annotations - - - - - - - org.apache.commons - commons-dbcp2 - - - org.postgresql - postgresql - - - javax.servlet - javax.servlet-api - provided - - - org.atteo - evo-inflector - 1.2.1 - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - - org.apache.logging.log4j - log4j-web - - - org.dspace - dspace-services - - - junit - junit - test - - - diff --git a/dspace-rest/src/main/java/org/dspace/rest/BitstreamResource.java b/dspace-rest/src/main/java/org/dspace/rest/BitstreamResource.java deleted file mode 100644 index 3a6ad859603e..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/BitstreamResource.java +++ /dev/null @@ -1,783 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.io.IOException; -import java.io.InputStream; -import java.net.URLConnection; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import 
org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.authorize.service.ResourcePolicyService; -import org.dspace.content.BitstreamFormat; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.BitstreamFormatService; -import org.dspace.content.service.BitstreamService; -import org.dspace.content.service.BundleService; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.GroupService; -import org.dspace.rest.common.Bitstream; -import org.dspace.rest.common.ResourcePolicy; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.storage.bitstore.factory.StorageServiceFactory; -import org.dspace.storage.bitstore.service.BitstreamStorageService; -import org.dspace.usage.UsageEvent; - -/** - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -// Every DSpace class used without namespace is from package -// org.dspace.rest.common.*. Otherwise namespace is defined. 
-@Path("/bitstreams") -public class BitstreamResource extends Resource { - protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); - protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance() - .getBitstreamFormatService(); - protected BitstreamStorageService bitstreamStorageService = StorageServiceFactory.getInstance() - .getBitstreamStorageService(); - protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance() - .getResourcePolicyService(); - protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(BitstreamResource.class); - - /** - * Return bitstream properties without file data. It can throw - * WebApplicationException with three response codes. Response code - * NOT_FOUND(404) or UNAUTHORIZED(401) or INTERNAL_SERVER_ERROR(500). Bad - * request is when the bitstream id does not exist. UNAUTHORIZED if the user - * logged into the DSpace context does not have the permission to access the - * bitstream. Server error when something went wrong. - * - * @param bitstreamId Id of bitstream in DSpace. - * @param expand This string defines which additional optional fields will be added - * to bitstream response. Individual options are separated by commas without - * spaces. The options are: "all", "parent". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. 
The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return If user is allowed to read bitstream, it returns instance of - * bitstream. Otherwise, it throws WebApplicationException with - * response code UNAUTHORIZED. - * @throws WebApplicationException It can happen on: Bad request, unauthorized, SQL exception - * and context exception(could not create context). - */ - @GET - @Path("/{bitstream_id}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Bitstream getBitstream(@PathParam("bitstream_id") String bitstreamId, @QueryParam("expand") String expand, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading bitstream(id=" + bitstreamId + ") metadata."); - org.dspace.core.Context context = null; - Bitstream bitstream = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.READ); - - writeStats(dspaceBitstream, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, - request, context); - - bitstream = new Bitstream(dspaceBitstream, servletContext, expand, context); - context.complete(); - log.trace("Bitstream(id=" + bitstreamId + ") was successfully read."); - - } catch (SQLException e) { - processException( - "Someting went wrong while reading bitstream(id=" + bitstreamId + ") from database! 
Message: " + e, - context); - } catch (ContextException e) { - processException( - "Someting went wrong while reading bitstream(id=" + bitstreamId + "), ContextException. Message: " - + e.getMessage(), context); - } finally { - processFinally(context); - } - - return bitstream; - } - - /** - * Return all bitstream resource policies from all bundles, in which - * the bitstream is present. - * - * @param bitstreamId Id of bitstream in DSpace. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @return Returns an array of ResourcePolicy objects. - */ - @GET - @Path("/{bitstream_id}/policy") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public ResourcePolicy[] getBitstreamPolicies(@PathParam("bitstream_id") String bitstreamId, - @Context HttpHeaders headers) { - - log.info("Reading bitstream(id=" + bitstreamId + ") policies."); - org.dspace.core.Context context = null; - ResourcePolicy[] policies = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.READ); - policies = new Bitstream(dspaceBitstream, servletContext, "policies", context).getPolicies(); - - context.complete(); - log.trace("Policies for bitstream(id=" + bitstreamId + ") was successfully read."); - - } catch (SQLException e) { - processException("Someting went wrong while reading policies of bitstream(id=" + bitstreamId - + "), SQLException! Message: " + e, context); - } catch (ContextException e) { - processException("Someting went wrong while reading policies of bitstream(id=" + bitstreamId - + "), ContextException. Message: " + e.getMessage(), context); - } finally { - processFinally(context); - } - - return policies; - } - - /** - * Read list of bitstreams. 
It throws WebApplicationException with response - * code INTERNAL_SERVER_ERROR(500), if there was problem while reading - * bitstreams from database. - * - * @param expand This string defines which additional optional fields will be added - * to bitstream response. Individual options are separated by commas without - * spaces. The options are: "all", "parent". - * @param limit How many bitstreams will be in the list. Default value is 100. - * @param offset On which offset (item) the list starts. Default value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @return Returns an array of bistreams. Array doesn't contain bitstreams for - * which the user doesn't have read permission. - * @throws WebApplicationException Thrown in case of a problem with reading the database or with - * creating a context. 
- */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Bitstream[] getBitstreams(@QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading bitstreams.(offset=" + offset + ",limit=" + limit + ")"); - org.dspace.core.Context context = null; - List bitstreams = new ArrayList(); - - try { - context = createContext(); - List dspaceBitstreams = bitstreamService.findAll(context); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Paging was badly set."); - limit = 100; - offset = 0; - } - - // TODO If bitstream doesn't exist, throws exception. - for (int i = offset; (i < (offset + limit)) && (i < dspaceBitstreams.size()); i++) { - if (authorizeService - .authorizeActionBoolean(context, dspaceBitstreams.get(i), org.dspace.core.Constants.READ)) { - if (bitstreamService.getParentObject(context, dspaceBitstreams - .get(i)) != null) { // To eliminate bitstreams which cause exception, because of - // reading under administrator permissions - bitstreams.add(new Bitstream(dspaceBitstreams.get(i), servletContext, expand, context)); - writeStats(dspaceBitstreams.get(i), UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - } - } - } - - context.complete(); - log.trace("Bitstreams were successfully read."); - - } catch (SQLException e) { - processException("Something went wrong while reading bitstreams from database!. Message: " + e, context); - } catch (ContextException e) { - processException( - "Something went wrong while reading bitstreams, ContextException. 
Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - return bitstreams.toArray(new Bitstream[0]); - } - - /** - * Read bitstream data. May throw WebApplicationException with the - * INTERNAL_SERVER_ERROR(500) code. Caused by three exceptions: IOException if - * there was a problem with reading bitstream file. SQLException if there was - * a problem while reading from database. And AuthorizeException if there was - * a problem with authorization of user logged to DSpace context. - * - * @param bitstreamId Id of the bitstream, whose data will be read. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @return Returns response with data with file content type. It can - * return the NOT_FOUND(404) response code in case of wrong bitstream - * id. Or response code UNAUTHORIZED(401) if user is not - * allowed to read bitstream. - * @throws WebApplicationException Thrown if there was a problem: reading the file data; or reading - * the database; or creating the context; or with authorization. 
- */ - @GET - @Path("/{bitstream_id}/retrieve") - public javax.ws.rs.core.Response getBitstreamData(@PathParam("bitstream_id") String bitstreamId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading data of bitstream(id=" + bitstreamId + ")."); - org.dspace.core.Context context = null; - InputStream inputStream = null; - String type = null; - String name = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.READ); - - writeStats(dspaceBitstream, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, - request, context); - - log.trace("Bitstream(id=" + bitstreamId + ") data was successfully read."); - inputStream = bitstreamService.retrieve(context, dspaceBitstream); - type = dspaceBitstream.getFormat(context).getMIMEType(); - name = dspaceBitstream.getName(); - - context.complete(); - } catch (IOException e) { - processException("Could not read file of bitstream(id=" + bitstreamId + ")! Message: " + e, context); - } catch (SQLException e) { - processException( - "Something went wrong while reading bitstream(id=" + bitstreamId + ") from database! Message: " + e, - context); - } catch (AuthorizeException e) { - processException( - "Could not retrieve file of bitstream(id=" + bitstreamId + "), AuthorizeException! Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not retrieve file of bitstream(id=" + bitstreamId + "), ContextException! 
Message: " + e - .getMessage(), - context); - } finally { - processFinally(context); - } - - return Response.ok(inputStream).type(type) - .header("Content-Disposition", "attachment; filename=\"" + name + "\"") - .build(); - } - - /** - * Add bitstream policy to all bundles containing the bitstream. - * - * @param bitstreamId Id of bitstream in DSpace. - * @param policy Policy to be added. The following attributes are not - * applied: epersonId, - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @return Returns ok, if all was ok. Otherwise status code 500. 
- */ - @POST - @Path("/{bitstream_id}/policy") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public javax.ws.rs.core.Response addBitstreamPolicy(@PathParam("bitstream_id") String bitstreamId, - ResourcePolicy policy, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Adding bitstream(id=" + bitstreamId + ") " + policy - .getAction() + " policy with permission for group(id=" + policy.getGroupId() - + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, - request, context); - - addPolicyToBitstream(context, policy, dspaceBitstream); - - context.complete(); - log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully added."); - - } catch (SQLException e) { - processException("Someting went wrong while adding policy to bitstream(id=" + bitstreamId - + "), SQLException! Message: " + e, context); - } catch (ContextException e) { - processException("Someting went wrong while adding policy to bitstream(id=" + bitstreamId - + "), ContextException. Message: " + e.getMessage(), context); - } catch (AuthorizeException e) { - processException("Someting went wrong while adding policy to bitstream(id=" + bitstreamId - + "), AuthorizeException! Message: " + e, context); - } finally { - processFinally(context); - } - return Response.status(Status.OK).build(); - } - - /** - * Update bitstream metadata. Replaces everything on targeted bitstream. - * May throw WebApplicationException caused by two exceptions: - * SQLException, if there was a problem with the database. 
AuthorizeException if - * there was a problem with the authorization to edit bitstream metadata. - * - * @param bitstreamId Id of bistream to be updated. - * @param bitstream Bitstream with will be placed. It must have filled user - * credentials. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @return Return response codes: OK(200), NOT_FOUND(404) if bitstream does - * not exist and UNAUTHORIZED(401) if user is not allowed to write - * to bitstream. - * @throws WebApplicationException Thrown when: Error reading from database; or error - * creating context; or error regarding bitstream authorization. 
- */ - @PUT - @Path("/{bitstream_id}") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response updateBitstream(@PathParam("bitstream_id") String bitstreamId, Bitstream bitstream, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Updating bitstream(id=" + bitstreamId + ") metadata."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - log.trace("Updating bitstream metadata."); - - dspaceBitstream.setDescription(context, bitstream.getDescription()); - if (getMimeType(bitstream.getName()) == null) { - BitstreamFormat unknownFormat = bitstreamFormatService.findUnknown(context); - bitstreamService.setFormat(context, dspaceBitstream, unknownFormat); - } else { - BitstreamFormat guessedFormat = bitstreamFormatService - .findByMIMEType(context, getMimeType(bitstream.getName())); - bitstreamService.setFormat(context, dspaceBitstream, guessedFormat); - } - dspaceBitstream.setName(context, bitstream.getName()); - Integer sequenceId = bitstream.getSequenceId(); - if (sequenceId != null && sequenceId.intValue() != -1) { - dspaceBitstream.setSequenceID(sequenceId); - } - - bitstreamService.update(context, dspaceBitstream); - - if (bitstream.getPolicies() != null) { - log.trace("Updating bitstream policies."); - - // Remove all old bitstream policies. 
- authorizeService.removeAllPolicies(context, dspaceBitstream); - - // Add all new bitstream policies - for (ResourcePolicy policy : bitstream.getPolicies()) { - addPolicyToBitstream(context, policy, dspaceBitstream); - } - } - - context.complete(); - - } catch (SQLException e) { - processException("Could not update bitstream(id=" + bitstreamId + ") metadata, SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException( - "Could not update bitstream(id=" + bitstreamId + ") metadata, AuthorizeException. Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not update bitstream(id=" + bitstreamId + ") metadata, ContextException. Message: " + e - .getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Bitstream metadata(id=" + bitstreamId + ") were successfully updated."); - return Response.ok().build(); - } - - /** - * Update bitstream data. Changes bitstream data by editing database rows. - * May throw WebApplicationException caused by: SQLException if there was - * a problem editing or reading the database, IOException if there was - * a problem with reading from InputStream, Exception if there was another - * problem. - * - * @param bitstreamId Id of bistream to be updated. - * @param is InputStream filled with new data. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. 
- * @param request Servlet's HTTP request object. - * @return Return response if bitstream was updated. Response codes: - * OK(200), NOT_FOUND(404) if id of bitstream was bad. And - * UNAUTHORIZED(401) if user is not allowed to update bitstream. - * @throws WebApplicationException This exception can be thrown in this cases: Problem with - * reading or writing to database. Or problem with reading from - * InputStream. - */ - // TODO Change to better logic, without editing database. - @PUT - @Path("/{bitstream_id}/data") - public Response updateBitstreamData(@PathParam("bitstream_id") String bitstreamId, InputStream is, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Updating bitstream(id=" + bitstreamId + ") data."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - log.trace("Creating new bitstream."); - - UUID newBitstreamId = bitstreamStorageService.store(context, dspaceBitstream, is); - log.trace("Bitstream data stored: " + newBitstreamId); - context.complete(); - } catch (SQLException e) { - processException("Could not update bitstream(id=" + bitstreamId + ") data, SQLException. Message: " + e, - context); - } catch (IOException e) { - processException("Could not update bitstream(id=" + bitstreamId + ") data, IOException. Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not update bitstream(id=" + bitstreamId + ") data, ContextException. 
Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Bitstream(id=" + bitstreamId + ") data was successfully updated."); - return Response.ok().build(); - } - - /** - * Delete bitstream from all bundles in DSpace. May throw - * WebApplicationException, which can be caused by three exceptions. - * SQLException if there was a problem reading from database or removing - * from database. AuthorizeException, if user doesn't have permission to delete - * the bitstream or file. IOException, if there was a problem deleting the file. - * - * @param bitstreamId Id of bitstream to be deleted. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @return Return response codes: OK(200), NOT_FOUND(404) if bitstream of - * that id does not exist and UNAUTHORIZED(401) if user is not - * allowed to delete bitstream. - * @throws WebApplicationException Can be thrown if there was a problem reading or editing - * the database. Or problem deleting the file. Or problem with - * authorization to bitstream and bundles. Or problem with - * creating context. 
- */ - @DELETE - @Path("/{bitstream_id}") - public Response deleteBitstream(@PathParam("bitstream_id") String bitstreamId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting bitstream(id=" + bitstreamId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.DELETE); - - writeStats(dspaceBitstream, UsageEvent.Action.DELETE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - log.trace("Deleting bitstream from all bundles."); - bitstreamService.delete(context, dspaceBitstream); - - context.complete(); - } catch (SQLException e) { - processException("Could not delete bitstream(id=" + bitstreamId + "), SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException("Could not delete bitstream(id=" + bitstreamId + "), AuthorizeException. Message: " + e, - context); - } catch (IOException e) { - processException("Could not delete bitstream(id=" + bitstreamId + "), IOException. Message: " + e, context); - } catch (ContextException e) { - processException( - "Could not delete bitstream(id=" + bitstreamId + "), ContextException. Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Bitstream(id=" + bitstreamId + ") was successfully deleted."); - return Response.ok().build(); - } - - /** - * Delete policy. - * - * @param bitstreamId Id of the DSpace bitstream whose policy will be deleted. - * @param policyId Id of the policy to delete. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @return It returns Ok, if all was ok. Otherwise status code 500. - */ - @DELETE - @Path("/{bitstream_id}/policy/{policy_id}") - public javax.ws.rs.core.Response deleteBitstreamPolicy(@PathParam("bitstream_id") String bitstreamId, - @PathParam("policy_id") Integer policyId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - log.info("Deleting policy(id=" + policyId + ") from bitstream(id=" + bitstreamId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Bitstream dspaceBitstream = findBitstream(context, bitstreamId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceBitstream, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, - request, context); - - org.dspace.authorize.ResourcePolicy resourcePolicy = resourcePolicyService.find(context, policyId); - if (resourcePolicy.getdSpaceObject().getID().equals(dspaceBitstream.getID()) && authorizeService - .authorizeActionBoolean(context, dspaceBitstream, org.dspace.core.Constants.REMOVE)) { - - try { - resourcePolicyService.delete(context, resourcePolicy); - } catch (AuthorizeException e) { - processException( - "Someting went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId 
- + "), AuthorizeException! Message: " + e, context); - } - log.trace("Policy for bitstream(id=" + bitstreamId + ") was successfully removed."); - } - - context.complete(); - } catch (SQLException e) { - processException( - "Someting went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId - + "), SQLException! Message: " + e, context); - } catch (ContextException e) { - processException( - "Someting went wrong while deleting policy(id=" + policyId + ") to bitstream(id=" + bitstreamId - + "), ContextException. Message: " + e.getMessage(), context); - } finally { - processFinally(context); - } - - return Response.status(Status.OK).build(); - } - - /** - * Return the MIME type of the file, by file extension. - * - * @param name Name of file. - * @return String filled with type of file in MIME style. - */ - static String getMimeType(String name) { - return URLConnection.guessContentTypeFromName(name); - } - - /** - * Add policy(org.dspace.rest.common.ResourcePolicy) to bitstream. - * - * @param context Context to create DSpace ResourcePolicy. - * @param policy Policy which will be added to bitstream. - * @param dspaceBitstream DSpace Bitstream object. - * @throws SQLException An exception that provides information on a database access error or other errors. - * @throws AuthorizeException Exception indicating the current user of the context does not have permission - * to perform a particular action. 
- */ - private void addPolicyToBitstream(org.dspace.core.Context context, ResourcePolicy policy, - org.dspace.content.Bitstream dspaceBitstream) - throws SQLException, AuthorizeException { - org.dspace.authorize.ResourcePolicy dspacePolicy = resourcePolicyService.create(context); - dspacePolicy.setAction(policy.getActionInt()); - dspacePolicy.setGroup(groupService.findByIdOrLegacyId(context, policy.getGroupId())); - dspacePolicy.setdSpaceObject(dspaceBitstream); - dspacePolicy.setStartDate(policy.getStartDate()); - dspacePolicy.setEndDate(policy.getEndDate()); - dspacePolicy.setRpDescription(policy.getRpDescription()); - dspacePolicy.setRpName(policy.getRpName()); - - resourcePolicyService.update(context, dspacePolicy); - } - - /** - * Find bitstream from DSpace database. This encapsulates the - * org.dspace.content.Bitstream.find method with a check whether the item exists and - * whether the user logged into the context has permission to preform the requested action. - * - * @param context Context of actual logged user. - * @param id Id of bitstream in DSpace. - * @param action Constant from org.dspace.core.Constants. - * @return Returns DSpace bitstream. - * @throws WebApplicationException Is thrown when item with passed id is not exists and if user - * has no permission to do passed action. 
- */ - private org.dspace.content.Bitstream findBitstream(org.dspace.core.Context context, String id, int action) - throws WebApplicationException { - org.dspace.content.Bitstream bitstream = null; - try { - bitstream = bitstreamService.findByIdOrLegacyId(context, id); - - if ((bitstream == null) || (bitstreamService.getParentObject(context, bitstream) == null)) { - context.abort(); - log.warn("Bitstream(id=" + id + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } else if (!authorizeService.authorizeActionBoolean(context, bitstream, action)) { - context.abort(); - if (context.getCurrentUser() != null) { - log.error("User(" + context.getCurrentUser().getEmail() + ") doesn't have the permission to " - + getActionString(action) + " bitstream!"); - } else { - log.error( - "User(anonymous) doesn't have the permission to " + getActionString(action) + " bitsteam!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - } catch (SQLException e) { - processException("Something went wrong while finding bitstream. 
SQLException, Message:" + e, context); - } - return bitstream; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java b/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java deleted file mode 100644 index 0edabe6ad26c..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/CollectionsResource.java +++ /dev/null @@ -1,754 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_LICENSE; -import static org.dspace.content.service.DSpaceObjectService.MD_NAME; -import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION; -import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import 
org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.ItemService; -import org.dspace.content.service.WorkspaceItemService; -import org.dspace.core.Constants; -import org.dspace.core.LogManager; -import org.dspace.rest.common.Collection; -import org.dspace.rest.common.Item; -import org.dspace.rest.common.MetadataEntry; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.usage.UsageEvent; -import org.dspace.workflow.WorkflowService; -import org.dspace.workflow.factory.WorkflowServiceFactory; - -/** - * This class provides all CRUD operation over collections. - * - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -@Path("/collections") -public class CollectionsResource extends Resource { - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - protected WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService(); - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(CollectionsResource.class); - - /** - * Return instance of collection with passed id. You can add more properties - * through expand parameter. - * - * @param collectionId Id of collection in DSpace. - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "items", "license" and "logo". If you want - * to use multiple options, it must be separated by commas. - * @param limit Limit value for items in list in collection. 
Default value is - * 100. - * @param offset Offset of start index in list of items of collection. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of collection. It can also return status code - * NOT_FOUND(404) if id of collection is incorrect or status code - * UNATHORIZED(401) if user has no permission to read collection. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). It is thrown by NOT_FOUND and - * UNATHORIZED status codes, too. 
- */ - @GET - @Path("/{collection_id}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.Collection getCollection(@PathParam("collection_id") String collectionId, - @QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading collection(id=" + collectionId + ")."); - org.dspace.core.Context context = null; - Collection collection = null; - - try { - context = createContext(); - - org.dspace.content.Collection dspaceCollection = findCollection(context, collectionId, - org.dspace.core.Constants.READ); - writeStats(dspaceCollection, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, - headers, request, context); - - collection = new Collection(dspaceCollection, servletContext, expand, context, limit, offset); - context.complete(); - - } catch (SQLException e) { - processException("Could not read collection(id=" + collectionId + "), SQLException. Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not read collection(id=" + collectionId + "), ContextException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("Collection(id=" + collectionId + ") has been successfully read."); - return collection; - } - - /** - * Return array of all collections in DSpace. You can add more properties - * through expand parameter. - * - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "items", "license" and "logo". 
If you want - * to use multiple options, it must be separated by commas. - * @param limit Limit value for items in list in collection. Default value is - * 100. - * @param offset Offset of start index in list of items of collection. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collections as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of collection, on which has logged user permission - * to view. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). 
- */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.Collection[] getCollections(@QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading all collections.(offset=" + offset + ",limit=" + limit + ")"); - org.dspace.core.Context context = null; - List collections = new ArrayList<>(); - - try { - context = createContext(); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Paging was badly set."); - limit = 100; - offset = 0; - } - - List dspaceCollections = collectionService.findAll(context, limit, offset); - for (org.dspace.content.Collection dspaceCollection : dspaceCollections) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCollection, org.dspace.core.Constants.READ)) { - Collection collection = new org.dspace.rest.common.Collection(dspaceCollection, servletContext, - null, context, limit, - offset); - collections.add(collection); - writeStats(dspaceCollection, UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - } - } - context.complete(); - } catch (SQLException e) { - processException("Something went wrong while reading collections from database. Message: " + e, context); - } catch (ContextException e) { - processException("Something went wrong while reading collections, ContextError. 
Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("All collections were successfully read."); - return collections.toArray(new org.dspace.rest.common.Collection[0]); - } - - /** - * Return array of items in collection. You can add more properties to items - * with expand parameter. - * - * @param collectionId Id of collection in DSpace. - * @param expand String which define, what additional properties will be in - * returned item. Options are separeted by commas and are: "all", - * "metadata", "parentCollection", "parentCollectionList", - * "parentCommunityList" and "bitstreams". - * @param limit Limit value for items in array. Default value is 100. - * @param offset Offset of start index in array of items of collection. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of items, on which has logged user permission to - * read. It can also return status code NOT_FOUND(404) if id of - * collection is incorrect or status code UNATHORIZED(401) if user - * has no permission to read collection. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). It is thrown by NOT_FOUND and - * UNATHORIZED status codes, too. 
- */ - @GET - @Path("/{collection_id}/items") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.Item[] getCollectionItems(@PathParam("collection_id") String collectionId, - @QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading collection(id=" + collectionId + ") items."); - org.dspace.core.Context context = null; - List items = null; - - try { - context = createContext(); - - org.dspace.content.Collection dspaceCollection = findCollection(context, collectionId, - org.dspace.core.Constants.READ); - writeStats(dspaceCollection, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, - headers, request, context); - - items = new ArrayList<>(); - Iterator dspaceItems = itemService.findByCollection(context, dspaceCollection, - limit, offset); - - while (dspaceItems.hasNext()) { - org.dspace.content.Item dspaceItem = dspaceItems.next(); - - if (itemService.isItemListedForUser(context, dspaceItem)) { - items.add(new Item(dspaceItem, servletContext, expand, context)); - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, - headers, request, context); - } - } - - context.complete(); - } catch (SQLException e) { - processException("Could not read collection items, SQLException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not read collection items, ContextException. 
Message: " + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("All items in collection(id=" + collectionId + ") were successfully read."); - return items.toArray(new Item[0]); - } - - /** - * Create item in collection. Item can be without filled metadata. - * - * @param collectionId Id of collection in which will be item created. - * @param item Item filled only with metadata, other variables are ignored. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return status code with item. Return status (OK)200 if item was - * created. NOT_FOUND(404) if id of collection does not exists. - * UNAUTHORIZED(401) if user have not permission to write items in - * collection. - * @throws WebApplicationException It is thrown when was problem with database reading or - * writing (SQLException) or problem with creating - * context(ContextException) or problem with authorization to - * collection or IOException or problem with index item into - * browse index. It is thrown by NOT_FOUND and UNATHORIZED - * status codes, too. 
- */ - @POST - @Path("/{collection_id}/items") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Item addCollectionItem(@PathParam("collection_id") String collectionId, Item item, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Create item in collection(id=" + collectionId + ")."); - org.dspace.core.Context context = null; - Item returnItem = null; - - try { - context = createContext(); - org.dspace.content.Collection dspaceCollection = findCollection(context, collectionId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceCollection, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - log.trace("Creating item in collection(id=" + collectionId + ")."); - org.dspace.content.WorkspaceItem workspaceItem = workspaceItemService - .create(context, dspaceCollection, false); - org.dspace.content.Item dspaceItem = workspaceItem.getItem(); - - log.trace("Adding metadata to item(id=" + dspaceItem.getID() + ")."); - if (item.getMetadata() != null) { - for (MetadataEntry entry : item.getMetadata()) { - String data[] = mySplit(entry.getKey()); - itemService.addMetadata(context, dspaceItem, data[0], data[1], data[2], entry.getLanguage(), - entry.getValue()); - } - } - - workspaceItemService.update(context, workspaceItem); - - try { - // Must insert the item into workflow - log.trace("Starting workflow for item(id=" + dspaceItem.getID() + ")."); - workflowService.start(context, workspaceItem); - } catch (Exception e) { - log.error( - LogManager.getHeader(context, "Error while starting workflow", "Item id: " + dspaceItem.getID()), - e); - throw new ContextException("Error while starting workflow for item(id=" + dspaceItem.getID() + ")", e); - } - - returnItem = new Item(workspaceItem.getItem(), 
servletContext, "", context); - - context.complete(); - - } catch (SQLException e) { - processException("Could not add item into collection(id=" + collectionId + "), SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException( - "Could not add item into collection(id=" + collectionId + "), AuthorizeException. Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not add item into collection(id=" + collectionId + "), ContextException. Message: " + e - .getMessage(), - context); - } finally { - processFinally(context); - } - - log.info( - "Item successfully created in collection(id=" + collectionId + "). Item handle=" + returnItem.getHandle()); - return returnItem; - } - - /** - * Update collection. It replace all properties. - * - * @param collectionId Id of collection in DSpace. - * @param collection Collection which will replace properties of actual collection. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response 200 if was everything all right. Otherwise 400 - * when id of community was incorrect or 401 if was problem with - * permission to write into collection. - * @throws WebApplicationException It is thrown when was problem with database reading or - * writing. Or problem with authorization to collection. 
Or - * problem with creating context. - */ - @PUT - @Path("/{collection_id}") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response updateCollection(@PathParam("collection_id") String collectionId, - org.dspace.rest.common.Collection collection, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Updating collection(id=" + collectionId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Collection dspaceCollection = findCollection(context, collectionId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceCollection, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_NAME, collection.getName(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_LICENSE, collection.getLicense(), null); - - // dspaceCollection.setLogo(collection.getLogo()); // TODO Add this option. - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_COPYRIGHT_TEXT, collection.getCopyrightText(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_INTRODUCTORY_TEXT, collection.getIntroductoryText(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_SHORT_DESCRIPTION, collection.getShortDescription(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_SIDEBAR_TEXT, collection.getSidebarText(), null); - collectionService.update(context, dspaceCollection); - - context.complete(); - - } catch (ContextException e) { - processException( - "Could not update collection(id=" + collectionId + "), ContextException. 
Message: " + e.getMessage(), - context); - } catch (SQLException e) { - processException("Could not update collection(id=" + collectionId + "), SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException("Could not update collection(id=" + collectionId + "), AuthorizeException. Message: " + e, - context); - } finally { - processFinally(context); - } - - log.info("Collection(id=" + collectionId + ") successfully updated."); - return Response.ok().build(); - } - - /** - * Delete collection. - * - * @param collectionId Id of collection which will be deleted. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response code OK(200) if was everything all right. - * Otherwise return NOT_FOUND(404) if was id of community or - * collection incorrect. Or (UNAUTHORIZED)401 if was problem with - * permission to community or collection. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or writing. Or problem with deleting - * collection caused by IOException or authorization. 
- */ - @DELETE - @Path("/{collection_id}") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response deleteCollection(@PathParam("collection_id") String collectionId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Delete collection(id=" + collectionId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Collection dspaceCollection = findCollection(context, collectionId, - org.dspace.core.Constants.DELETE); - - writeStats(dspaceCollection, UsageEvent.Action.REMOVE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - collectionService.delete(context, dspaceCollection); - collectionService.update(context, dspaceCollection); - - context.complete(); - } catch (ContextException e) { - processException( - "Could not delete collection(id=" + collectionId + "), ContextException. Message: " + e.getMessage(), - context); - } catch (SQLException e) { - processException("Could not delete collection(id=" + collectionId + "), SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException("Could not delete collection(id=" + collectionId + "), AuthorizeException. Message: " + e, - context); - } catch (IOException e) { - processException("Could not delete collection(id=" + collectionId + "), IOException. Message: " + e, - context); - } finally { - processFinally(context); - } - - log.info("Collection(id=" + collectionId + ") was successfully deleted."); - return Response.ok().build(); - } - - /** - * Delete item in collection. - * - * @param collectionId Id of collection which will be deleted. - * @param itemId Id of item in colletion. - * @param user_ip User's IP address. 
- * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return It returns status code: OK(200). NOT_FOUND(404) if item or - * collection was not found, UNAUTHORIZED(401) if user is not - * allowed to delete item or permission to write into collection. - * @throws WebApplicationException It can be thrown by: SQLException, when was problem with - * database reading or writting. AuthorizeException, when was - * problem with authorization to item or collection. - * IOException, when was problem with removing item. - * ContextException, when was problem with creating context of - * DSpace. 
- */ - @DELETE - @Path("/{collection_id}/items/{item_id}") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response deleteCollectionItem(@PathParam("collection_id") String collectionId, - @PathParam("item_id") String itemId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Delete item(id=" + itemId + ") in collection(id=" + collectionId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.Collection dspaceCollection = collectionService - .findByIdOrLegacyId(context, collectionId); - org.dspace.content.Item item = itemService.findByIdOrLegacyId(context, itemId); - - - if (dspaceCollection == null) { - //throw collection not exist - log.warn("Collection(id=" + itemId + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - - if (item == null) { - //throw item not exist - log.warn("Item(id=" + itemId + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - - if (!authorizeService.authorizeActionBoolean(context, item, Constants.REMOVE) - || !authorizeService.authorizeActionBoolean(context, dspaceCollection, Constants.REMOVE)) { - //throw auth - if (context.getCurrentUser() != null) { - log.error( - "User(" + context.getCurrentUser().getEmail() + ") does not have permission to delete item!"); - } else { - log.error("User(anonymous) has not permission to delete item!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - collectionService.removeItem(context, dspaceCollection, item); - collectionService.update(context, dspaceCollection); - itemService.update(context, item); - - writeStats(dspaceCollection, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, 
request, context); - writeStats(item, UsageEvent.Action.REMOVE, user_ip, user_agent, xforwardedfor, headers, request, context); - - context.complete(); - - } catch (ContextException e) { - processException("Could not delete item(id=" + itemId + ") in collection(id=" + collectionId - + "), ContextException. Message: " + e.getMessage(), context); - } catch (SQLException e) { - processException("Could not delete item(id=" + itemId + ") in collection(id=" + collectionId - + "), SQLException. Message: " + e, context); - } catch (AuthorizeException e) { - processException("Could not delete item(id=" + itemId + ") in collection(id=" + collectionId - + "), AuthorizeException. Message: " + e, context); - } catch (IOException e) { - processException("Could not delete item(id=" + itemId + ") in collection(id=" + collectionId - + "), IOException. Message: " + e, context); - } finally { - processFinally(context); - } - - log.info("Item(id=" + itemId + ") in collection(id=" + collectionId + ") was successfully deleted."); - return Response.ok().build(); - } - - /** - * Search for first collection with passed name. - * - * @param name Name of collection. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @return It returns null if collection was not found. Otherwise returns - * first founded collection. - * @throws WebApplicationException A general exception a servlet can throw when it encounters difficulty. 
- */ - @POST - @Path("/find-collection") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Collection findCollectionByName(String name, @Context HttpHeaders headers) throws WebApplicationException { - log.info("Searching for first collection with name=" + name + "."); - org.dspace.core.Context context = null; - Collection collection = null; - - try { - context = createContext(); - - List dspaceCollections = collectionService.findAll(context); - //TODO, this would be more efficient with a findByName query - - for (org.dspace.content.Collection dspaceCollection : dspaceCollections) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCollection, org.dspace.core.Constants.READ)) { - if (dspaceCollection.getName().equals(name)) { - collection = new Collection(dspaceCollection, servletContext, "", context, 100, 0); - break; - } - } - } - - context.complete(); - - } catch (SQLException e) { - processException( - "Something went wrong while searching for collection(name=" + name + ") from database. Message: " - + e, context); - } catch (ContextException e) { - processException( - "Something went wrong while searching for collection(name=" + name + "), ContextError. Message: " - + e.getMessage(), context); - } finally { - processFinally(context); - } - - if (collection == null) { - log.info("Collection was not found."); - } else { - log.info("Collection was found with id(" + collection.getUUID() + ")."); - } - return collection; - } - - /** - * Find collection from DSpace database. It is encapsulation of method - * org.dspace.content.Collection.find with checking if item exist and if - * user logged into context has permission to do passed action. - * - * @param context Context of actual logged user. - * @param id Id of collection in DSpace. - * @param action Constant from org.dspace.core.Constants. - * @return It returns DSpace collection. 
- * @throws WebApplicationException Is thrown when item with passed id is not exists and if user - * has no permission to do passed action. - */ - private org.dspace.content.Collection findCollection(org.dspace.core.Context context, String id, int action) - throws WebApplicationException { - org.dspace.content.Collection collection = null; - try { - collection = collectionService.findByIdOrLegacyId(context, id); - - if (collection == null) { - context.abort(); - log.warn("Collection(id=" + id + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } else if (!authorizeService.authorizeActionBoolean(context, collection, action)) { - context.abort(); - if (context.getCurrentUser() != null) { - log.error("User(" + context.getCurrentUser().getEmail() + ") has not permission to " - + getActionString(action) + " collection!"); - } else { - log.error("User(anonymous) has not permission to " + getActionString(action) + " collection!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - } catch (SQLException e) { - processException("Something get wrong while finding collection(id=" + id + "). 
SQLException, Message: " + e, - context); - } - return collection; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/CommunitiesResource.java b/dspace-rest/src/main/java/org/dspace/rest/CommunitiesResource.java deleted file mode 100644 index c3d4840910d9..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/CommunitiesResource.java +++ /dev/null @@ -1,1052 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_LICENSE; -import static org.dspace.content.service.DSpaceObjectService.MD_NAME; -import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION; -import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import 
org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.rest.common.Collection; -import org.dspace.rest.common.Community; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.usage.UsageEvent; - -/** - * Class which provides CRUD methods over communities. - * - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -@Path("/communities") -public class CommunitiesResource extends Resource { - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(CommunitiesResource.class); - - /** - * Returns community with basic properties. If you want more, use expand - * parameter or method for community collections or subcommunities. - * - * @param communityId Id of community in DSpace. - * @param expand String in which is what you want to add to returned instance - * of community. Options are: "all", "parentCommunity", - * "collections", "subCommunities" and "logo". If you want to use - * multiple options, it must be separated by commas. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. 
The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of org.dspace.rest.common.Community. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading. Also if id of community is incorrect - * or logged user into context has no permission to read. - */ - @GET - @Path("/{community_id}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Community getCommunity(@PathParam("community_id") String communityId, @QueryParam("expand") String expand, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading community(id=" + communityId + ")."); - org.dspace.core.Context context = null; - Community community = null; - - try { - context = createContext(); - - org.dspace.content.Community dspaceCommunity = findCommunity(context, communityId, - org.dspace.core.Constants.READ); - writeStats(dspaceCommunity, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, - request, context); - - community = new Community(dspaceCommunity, servletContext, expand, context); - context.complete(); - - } catch (SQLException e) { - processException("Could not read community(id=" + communityId + "), SQLException. Message:" + e, context); - } catch (ContextException e) { - processException( - "Could not read community(id=" + communityId + "), ContextException. Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - - log.trace("Community(id=" + communityId + ") was successfully read."); - return community; - } - - /** - * Return all communities in DSpace. 
- * - * @param expand String in which is what you want to add to returned instance - * of community. Options are: "all", "parentCommunity", - * "collections", "subCommunities" and "logo". If you want to use - * multiple options, it must be separated by commas. - * @param limit Maximum communities in array. Default value is 100. - * @param offset Index from which will start array of communities. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of communities. - * @throws WebApplicationException It can be caused by creating context or while was problem - * with reading community from database(SQLException). 
- */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Community[] getCommunities(@QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading all communities.(offset=" + offset + " ,limit=" + limit + ")."); - org.dspace.core.Context context = null; - ArrayList communities = null; - - try { - context = createContext(); - - List dspaceCommunities = communityService.findAll(context); - communities = new ArrayList<>(); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Paging was badly set, using default values."); - limit = 100; - offset = 0; - } - - for (int i = offset; (i < (offset + limit)) && i < dspaceCommunities.size(); i++) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCommunities.get(i), org.dspace.core.Constants.READ)) { - Community community = new Community(dspaceCommunities.get(i), servletContext, expand, context); - writeStats(dspaceCommunities.get(i), UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - communities.add(community); - } - } - - context.complete(); - } catch (SQLException e) { - processException("Could not read communities, SQLException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not read communities, ContextException. Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("All communities successfully read."); - return communities.toArray(new Community[0]); - } - - /** - * Return all top communities in DSpace. Top communities are communities on - * the root of tree. 
- * - * @param expand String in which is what you want to add to returned instance - * of community. Options are: "all", "parentCommunity", - * "collections", "subCommunities" and "logo". If you want to use - * multiple options, it must be separated by commas. - * @param limit Maximum communities in array. Default value is 100. - * @param offset Index from which will start array of communities. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of top communities. - * @throws WebApplicationException It can be caused by creating context or while was problem - * with reading community from database(SQLException). 
- */ - @GET - @Path("/top-communities") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Community[] getTopCommunities(@QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("20") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading all top communities.(offset=" + offset + " ,limit=" + limit + ")."); - org.dspace.core.Context context = null; - ArrayList communities = null; - - try { - context = createContext(); - - List dspaceCommunities = communityService.findAllTop(context); - communities = new ArrayList<>(); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Paging was badly set, using default values."); - limit = 100; - offset = 0; - } - - for (int i = offset; (i < (offset + limit)) && i < dspaceCommunities.size(); i++) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCommunities.get(i), org.dspace.core.Constants.READ)) { - Community community = new Community(dspaceCommunities.get(i), servletContext, expand, context); - writeStats(dspaceCommunities.get(i), UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - communities.add(community); - } - } - - context.complete(); - } catch (SQLException e) { - processException("Could not read top communities, SQLException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not read top communities, ContextException. Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("All top communities successfully read."); - return communities.toArray(new Community[0]); - } - - /** - * Return all collections of community. 
- * - * @param communityId Id of community in DSpace. - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "items", "license" and "logo". If you want - * to use multiple options, it must be separated by commas. - * @param limit Maximum collection in array. Default value is 100. - * @param offset Index from which will start array of collections. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of collections of community. - * @throws WebApplicationException It can be caused by creating context or while was problem - * with reading community from database(SQLException). 
- */ - @GET - @Path("/{community_id}/collections") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Collection[] getCommunityCollections(@PathParam("community_id") String communityId, - @QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading community(id=" + communityId + ") collections."); - org.dspace.core.Context context = null; - ArrayList collections = null; - - try { - context = createContext(); - - org.dspace.content.Community dspaceCommunity = findCommunity(context, communityId, - org.dspace.core.Constants.READ); - writeStats(dspaceCommunity, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, - request, context); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Pagging was badly set, using default values."); - limit = 100; - offset = 0; - } - - collections = new ArrayList<>(); - List dspaceCollections = dspaceCommunity.getCollections(); - for (int i = offset; (i < (offset + limit)) && (i < dspaceCollections.size()); i++) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCollections.get(i), org.dspace.core.Constants.READ)) { - collections.add(new Collection(dspaceCollections.get(i), servletContext, expand, context, 20, 0)); - writeStats(dspaceCollections.get(i), UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - } - } - - context.complete(); - } catch (SQLException e) { - processException("Could not read community(id=" + communityId + ") collections, SQLException. 
Message:" + e, - context); - } catch (ContextException e) { - processException( - "Could not read community(id=" + communityId + ") collections, ContextException. Message:" + e - .getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("Community(id=" + communityId + ") collections were successfully read."); - return collections.toArray(new Collection[0]); - } - - /** - * Return all subcommunities of community. - * - * @param communityId Id of community in DSpace. - * @param expand String in which is what you want to add to returned instance - * of community. Options are: "all", "parentCommunity", - * "collections", "subCommunities" and "logo". If you want to use - * multiple options, it must be separated by commas. - * @param limit Maximum communities in array. Default value is 20. - * @param offset Index from which will start array of communities. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of subcommunities of community. - * @throws WebApplicationException It can be caused by creating context or while was problem - * with reading community from database(SQLException). 
- */ - @GET - @Path("/{community_id}/communities") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Community[] getCommunityCommunities(@PathParam("community_id") String communityId, - @QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("20") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading community(id=" + communityId + ") subcommunities."); - org.dspace.core.Context context = null; - ArrayList communities = null; - - try { - context = createContext(); - - org.dspace.content.Community dspaceCommunity = findCommunity(context, communityId, - org.dspace.core.Constants.READ); - writeStats(dspaceCommunity, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, - request, context); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Pagging was badly set, using default values."); - limit = 100; - offset = 0; - } - - communities = new ArrayList<>(); - List dspaceCommunities = dspaceCommunity.getSubcommunities(); - for (int i = offset; (i < (offset + limit)) && (i < dspaceCommunities.size()); i++) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCommunities.get(i), org.dspace.core.Constants.READ)) { - communities.add(new Community(dspaceCommunities.get(i), servletContext, expand, context)); - writeStats(dspaceCommunities.get(i), UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - } - } - - context.complete(); - } catch (SQLException e) { - processException( - "Could not read community(id=" + communityId + ") subcommunities, SQLException. 
Message:" + e, - context); - } catch (ContextException e) { - processException( - "Could not read community(id=" + communityId + ") subcommunities, ContextException. Message:" - + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("Community(id=" + communityId + ") subcommunities were successfully read."); - return communities.toArray(new Community[0]); - } - - /** - * Create community at top level. Creating community at top level has - * permission only admin. - * - * @param community Community which will be created at top level of communities. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Returns response with handle of community, if was all ok. - * @throws WebApplicationException It can be thrown by SQLException, AuthorizeException and - * ContextException. 
- */ - @POST - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Community createCommunity(Community community, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Creating community at top level."); - org.dspace.core.Context context = null; - Community retCommunity = null; - - try { - context = createContext(); - if (!authorizeService.isAdmin(context)) { - context.abort(); - String user = "anonymous"; - if (context.getCurrentUser() != null) { - user = context.getCurrentUser().getEmail(); - } - log.error("User(" + user + ") has not permission to create community!"); - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - org.dspace.content.Community dspaceCommunity = communityService.create(null, context); - writeStats(dspaceCommunity, UsageEvent.Action.CREATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_NAME, community.getName(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_COPYRIGHT_TEXT, community.getCopyrightText(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_INTRODUCTORY_TEXT, community.getIntroductoryText(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_SHORT_DESCRIPTION, community.getShortDescription(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_SIDEBAR_TEXT, community.getSidebarText(), null); - communityService.update(context, dspaceCommunity); - - retCommunity = new Community(dspaceCommunity, servletContext, "", context); - context.complete(); - } catch (SQLException e) { - processException("Could not create new top community, SQLException. 
Message: " + e, context); - } catch (ContextException e) { - processException("Could not create new top community, ContextException. Message: " + e.getMessage(), - context); - } catch (AuthorizeException e) { - processException("Could not create new top community, AuthorizeException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - - log.info("Community at top level has been successfully created. Handle:" + retCommunity.getHandle()); - return retCommunity; - } - - /** - * Create collection in community. - * - * @param communityId Id of community in DSpace. - * @param collection Collection which will be added into community. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response 200 if was everything all right. Otherwise 400 - * when id of community was incorrect or 401 if was problem with - * permission to write into collection. - * @throws WebApplicationException It is thrown when was problem with database reading or - * writing. Or problem with authorization to community. Or - * problem with creating context. 
- */ - @POST - @Path("/{community_id}/collections") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Collection addCommunityCollection(@PathParam("community_id") String communityId, Collection collection, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Adding collection into community(id=" + communityId + ")."); - org.dspace.core.Context context = null; - Collection retCollection = null; - - try { - context = createContext(); - - org.dspace.content.Community dspaceCommunity = findCommunity(context, communityId, - org.dspace.core.Constants.WRITE); - writeStats(dspaceCommunity, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - org.dspace.content.Collection dspaceCollection = collectionService.create(context, dspaceCommunity); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_LICENSE, collection.getLicense(), null); - // dspaceCollection.setLogo(collection.getLogo()); // TODO Add this option. 
- collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_NAME, collection.getName(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_COPYRIGHT_TEXT, collection.getCopyrightText(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_INTRODUCTORY_TEXT, collection.getIntroductoryText(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_SHORT_DESCRIPTION, collection.getShortDescription(), null); - collectionService.setMetadataSingleValue(context, dspaceCollection, - MD_SIDEBAR_TEXT, collection.getSidebarText(), null); - collectionService.update(context, dspaceCollection); - communityService.update(context, dspaceCommunity); - retCollection = new Collection(dspaceCollection, servletContext, "", context, 100, 0); - context.complete(); - - } catch (SQLException e) { - processException( - "Could not add collection into community(id=" + communityId + "), SQLException. Message:" + e, - context); - } catch (AuthorizeException e) { - processException( - "Could not add collection into community(id=" + communityId + "), AuthorizeException. Message:" + e, - context); - } catch (ContextException e) { - processException( - "Could not add collection into community(id=" + communityId + "), ContextException. Message:" - + e.getMessage(), context); - } finally { - processFinally(context); - } - - - log.info("Collection was successfully added into community(id=" + communityId + "). Collection handle=" - + retCollection.getHandle()); - return retCollection; - } - - /** - * Create subcommunity in community. - * - * @param communityId Id of community in DSpace, in which will be created - * subcommunity. - * @param community Community which will be added into community. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response 200 if was everything all right. Otherwise 400 - * when id of community was incorrect or 401 if was problem with - * permission to write into collection. - * @throws WebApplicationException It is thrown when was problem with database reading or - * writing. Or problem with authorization to community. Or - * problem with creating context. - */ - @POST - @Path("/{community_id}/communities") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Community addCommunityCommunity(@PathParam("community_id") String communityId, Community community, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Add subcommunity into community(id=" + communityId + ")."); - org.dspace.core.Context context = null; - Community retCommunity = null; - - try { - context = createContext(); - org.dspace.content.Community dspaceParentCommunity = findCommunity(context, communityId, - org.dspace.core.Constants.WRITE); - - writeStats(dspaceParentCommunity, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - org.dspace.content.Community dspaceCommunity = communityService - .createSubcommunity(context, dspaceParentCommunity); - 
communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_NAME, community.getName(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_COPYRIGHT_TEXT, community.getCopyrightText(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_INTRODUCTORY_TEXT, community.getIntroductoryText(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_SHORT_DESCRIPTION, community.getShortDescription(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_SIDEBAR_TEXT, community.getSidebarText(), null); - communityService.update(context, dspaceCommunity); - communityService.update(context, dspaceParentCommunity); - - retCommunity = new Community(dspaceCommunity, servletContext, "", context); - context.complete(); - - } catch (SQLException e) { - processException( - "Could not add subcommunity into community(id=" + communityId + "), SQLException. Message:" + e, - context); - } catch (AuthorizeException e) { - processException( - "Could not add subcommunity into community(id=" + communityId + "), AuthorizeException. Message:" - + e, context); - } catch (ContextException e) { - processException( - "Could not add subcommunity into community(id=" + communityId + "), ContextException. Message:" + e, - context); - } finally { - processFinally(context); - } - - - log.info("Subcommunity was successfully added in community(id=" + communityId + ")."); - return retCommunity; - } - - /** - * Update community. Replace all information about community except: id, - * handle and expandle items. - * - * @param communityId Id of community in DSpace. - * @param community Instance of community which will replace actual community in - * DSpace. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Response 200 if was all ok. Otherwise 400 if id was incorrect or - * 401 if logged user has no permission to delete community. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or writing. Or problem with writing to - * community caused by authorization. - */ - @PUT - @Path("/{community_id}") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response updateCommunity(@PathParam("community_id") String communityId, Community community, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Updating community(id=" + communityId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.Community dspaceCommunity = findCommunity(context, communityId, - org.dspace.core.Constants.WRITE); - writeStats(dspaceCommunity, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - // dspaceCommunity.setLogo(arg0); // TODO Add this option. 
- communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_NAME, community.getName(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_COPYRIGHT_TEXT, community.getCopyrightText(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_INTRODUCTORY_TEXT, community.getIntroductoryText(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_SHORT_DESCRIPTION, community.getShortDescription(), null); - communityService.setMetadataSingleValue(context, dspaceCommunity, - MD_SIDEBAR_TEXT, community.getSidebarText(), null); - communityService.update(context, dspaceCommunity); - context.complete(); - - } catch (SQLException e) { - processException("Could not update community(id=" + communityId + "), AuthorizeException. Message:" + e, - context); - } catch (ContextException e) { - processException("Could not update community(id=" + communityId + "), ContextException Message:" + e, - context); - } catch (AuthorizeException e) { - processException("Could not update community(id=" + communityId + "), AuthorizeException Message:" + e, - context); - } finally { - processFinally(context); - } - - log.info("Community(id=" + communityId + ") has been successfully updated."); - return Response.ok().build(); - } - - /** - * Delete community from DSpace. It delete it everything with community! - * - * @param communityId Id of community in DSpace. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. 
The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response code OK(200) if was everything all right. - * Otherwise return NOT_FOUND(404) if was id of community incorrect. - * Or (UNAUTHORIZED)401 if was problem with permission to community. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or deleting. Or problem with deleting - * community caused by IOException or authorization. - */ - @DELETE - @Path("/{community_id}") - public Response deleteCommunity(@PathParam("community_id") String communityId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting community(id=" + communityId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.Community community = findCommunity(context, communityId, - org.dspace.core.Constants.DELETE); - writeStats(community, UsageEvent.Action.DELETE, user_ip, user_agent, xforwardedfor, headers, - request, context); - - communityService.delete(context, community); - communityService.update(context, community); - context.complete(); - - } catch (SQLException e) { - processException("Could not delete community(id=" + communityId + "), SQLException. Message:" + e, context); - } catch (AuthorizeException e) { - processException("Could not delete community(id=" + communityId + "), AuthorizeException. Message:" + e, - context); - } catch (IOException e) { - processException("Could not delete community(id=" + communityId + "), IOException. 
Message:" + e, context); - } catch (ContextException e) { - processException( - "Could not delete community(id=" + communityId + "), ContextException. Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - - log.info("Community(id=" + communityId + ") was successfully deleted."); - return Response.status(Response.Status.OK).build(); - } - - /** - * Delete collection in community. - * - * @param communityId Id of community in DSpace. - * @param collectionId Id of collection which will be deleted. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response code OK(200) if was everything all right. - * Otherwise return NOT_FOUND(404) if was id of community or - * collection incorrect. Or (UNAUTHORIZED)401 if was problem with - * permission to community or collection. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or deleting. Or problem with deleting - * collection caused by IOException or authorization. 
- */ - @DELETE - @Path("/{community_id}/collections/{collection_id}") - public Response deleteCommunityCollection(@PathParam("community_id") String communityId, - @PathParam("collection_id") String collectionId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting collection(id=" + collectionId + ") in community(id=" + communityId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.Community community = findCommunity(context, communityId, - org.dspace.core.Constants.WRITE); - org.dspace.content.Collection collection = collectionService.findByIdOrLegacyId(context, collectionId); - - if (collection == null) { - context.abort(); - log.warn("Collection(id=" + collectionId + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } else if (!authorizeService - .authorizeActionBoolean(context, collection, org.dspace.core.Constants.REMOVE)) { - context.abort(); - if (context.getCurrentUser() != null) { - log.error( - "User(" + context.getCurrentUser().getEmail() + ") has not permission to delete collection!"); - } else { - log.error("User(anonymous) has not permission to delete collection!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - communityService.removeCollection(context, community, collection); - communityService.update(context, community); - collectionService.update(context, collection); - - writeStats(community, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, - request, context); - writeStats(collection, UsageEvent.Action.DELETE, user_ip, user_agent, xforwardedfor, headers, - request, context); - - context.complete(); - - } catch (SQLException e) { - processException("Could not delete collection(id=" + 
collectionId + ") in community(id=" + communityId - + "), SQLException. Message:" + e, context); - } catch (AuthorizeException e) { - processException("Could not delete collection(id=" + collectionId + ") in community(id=" + communityId - + "), AuthorizeException. Message:" + e, context); - } catch (IOException e) { - processException("Could not delete collection(id=" + collectionId + ") in community(id=" + communityId - + "), IOException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not delete collection(id=" + collectionId + ") in community(id=" + communityId - + "), ContextExcpetion. Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - - log.info("Collection(id=" + collectionId + ") in community(id=" + communityId + ") was successfully deleted."); - return Response.status(Response.Status.OK).build(); - } - - /** - * Delete subcommunity in community. - * - * @param parentCommunityId Id of community in DSpace. - * @param subcommunityId Id of community which will be deleted. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response code OK(200) if was everything all right. - * Otherwise return NOT_FOUND(404) if was id of community or - * subcommunity incorrect. 
Or (UNAUTHORIZED)401 if was problem with - * permission to community or subcommunity. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or deleting. Or problem with deleting - * subcommunity caused by IOException or authorization. - */ - @DELETE - @Path("/{community_id}/communities/{community_id2}") - public Response deleteCommunityCommunity(@PathParam("community_id") String parentCommunityId, - @PathParam("community_id2") String subcommunityId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting community(id=" + parentCommunityId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.Community parentCommunity = findCommunity(context, parentCommunityId, - org.dspace.core.Constants.WRITE); - org.dspace.content.Community subcommunity = communityService.findByIdOrLegacyId(context, subcommunityId); - - if (subcommunity == null) { - context.abort(); - log.warn("Subcommunity(id=" + subcommunityId + ") in community(id=" + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } else if (!authorizeService - .authorizeActionBoolean(context, subcommunity, org.dspace.core.Constants.REMOVE)) { - context.abort(); - if (context.getCurrentUser() != null) { - log.error( - "User(" + context.getCurrentUser().getEmail() + ") has not permission to delete community!"); - } else { - log.error("User(anonymous) has not permission to delete community!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - communityService.removeSubcommunity(context, parentCommunity, subcommunity); - communityService.update(context, parentCommunity); - communityService.update(context, subcommunity); - - 
writeStats(parentCommunity, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - writeStats(subcommunity, UsageEvent.Action.DELETE, user_ip, user_agent, xforwardedfor, headers, - request, context); - - context.complete(); - - } catch (SQLException e) { - processException( - "Could not delete subcommunity(id=" + subcommunityId + ") in community(id=" + parentCommunityId - + "), SQLException. Message:" + e, context); - } catch (AuthorizeException e) { - processException( - "Could not delete subcommunity(id=" + subcommunityId + ") in community(id=" + parentCommunityId - + "), AuthorizeException. Message:" + e, context); - } catch (IOException e) { - processException( - "Could not delete subcommunity(id=" + subcommunityId + ") in community(id=" + parentCommunityId - + "), IOException. Message:" + e, context); - } catch (ContextException e) { - processException( - "Could not delete subcommunity(id=" + subcommunityId + ") in community(id=" + parentCommunityId - + "), ContextException. Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - - log.info("Subcommunity(id=" + subcommunityId + ") from community(id=" + parentCommunityId - + ") was successfully deleted."); - return Response.status(Response.Status.OK).build(); - } - - /** - * Find community from DSpace database. It is encapsulation of method - * org.dspace.content.Community.find with checking if item exist and if user - * logged into context has permission to do passed action. - * - * @param context Context of actual logged user. - * @param id Id of community in DSpace. - * @param action Constant from org.dspace.core.Constants. - * @return It returns DSpace collection. - * @throws WebApplicationException Is thrown when item with passed id is not exists and if user - * has no permission to do passed action. 
- */ - private org.dspace.content.Community findCommunity(org.dspace.core.Context context, String id, int action) - throws WebApplicationException { - org.dspace.content.Community community = null; - try { - community = communityService.findByIdOrLegacyId(context, id); - - if (community == null) { - context.abort(); - log.warn("Community(id=" + id + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } else if (!authorizeService.authorizeActionBoolean(context, community, action)) { - context.abort(); - if (context.getCurrentUser() != null) { - log.error("User(" + context.getCurrentUser().getEmail() + ") has not permission to " - + getActionString(action) + " community!"); - } else { - log.error("User(anonymous) has not permission to " + getActionString(action) + " community!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - } catch (SQLException e) { - processException("Something get wrong while finding community(id=" + id + "). 
SQLException, Message:" + e, - context); - } - return community; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/DSpaceRestApplication.java b/dspace-rest/src/main/java/org/dspace/rest/DSpaceRestApplication.java deleted file mode 100644 index baa5c8555b05..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/DSpaceRestApplication.java +++ /dev/null @@ -1,19 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import org.glassfish.jersey.jackson.JacksonFeature; -import org.glassfish.jersey.server.ResourceConfig; - -public class DSpaceRestApplication extends ResourceConfig { - - public DSpaceRestApplication() { - register(JacksonFeature.class); - packages("org.dspace.rest"); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/FilteredCollectionsResource.java b/dspace-rest/src/main/java/org/dspace/rest/FilteredCollectionsResource.java deleted file mode 100644 index 133ed50d9cdb..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/FilteredCollectionsResource.java +++ /dev/null @@ -1,215 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import 
javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.rest.common.FilteredCollection; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; -import org.dspace.usage.UsageEvent; - -/* - * This class provides the items within a collection evaluated against a set of Item Filters. - * - * @author Terry Brady, Georgetown University - */ -@Path("/filtered-collections") -public class FilteredCollectionsResource extends Resource { - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(FilteredCollectionsResource.class); - - /** - * Return array of all collections in DSpace. You can add more properties - * through expand parameter. - * - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "topCommunity", "items", "license" and "logo". - * If you want to use multiple options, it must be separated by commas. - * @param limit Limit value for items in list in collection. Default value is - * 100. - * @param offset Offset of start index in list of items of collection. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param filters Comma separated list of Item Filters to use to evaluate against - * the items in a collection - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param servletContext Context of the servlet container. - * @param headers If you want to access the collections as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of collection, on which has logged user permission - * to view. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). - */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.FilteredCollection[] getCollections(@QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") - Integer limit, - @QueryParam("offset") @DefaultValue("0") - Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("filters") @DefaultValue("is_item") - String filters, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context ServletContext servletContext, - @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading all filtered collections.(offset=" + offset + ",limit=" + limit + ")"); - org.dspace.core.Context context = null; - List collections = new ArrayList(); - - try { - context = createContext(); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Paging was badly set."); - 
limit = 100; - offset = 0; - } - - List dspaceCollections = collectionService.findAll(context, limit, offset); - for (org.dspace.content.Collection dspaceCollection : dspaceCollections) { - if (authorizeService - .authorizeActionBoolean(context, dspaceCollection, org.dspace.core.Constants.READ)) { - FilteredCollection collection = new org.dspace.rest.common.FilteredCollection(dspaceCollection, - servletContext, - filters, expand, - context, limit, - offset); - collections.add(collection); - writeStats(dspaceCollection, UsageEvent.Action.VIEW, user_ip, user_agent, - xforwardedfor, headers, request, context); - } - } - context.complete(); - } catch (SQLException e) { - processException("Something went wrong while reading collections from database. Message: " + e, context); - } catch (ContextException e) { - processException("Something went wrong while reading collections, ContextError. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("All collections were successfully read."); - return collections.toArray(new org.dspace.rest.common.FilteredCollection[0]); - } - - /** - * Return instance of collection with passed id. You can add more properties - * through expand parameter. - * - * @param collection_id Id of collection in DSpace. - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "topCommunity", "items", "license" and "logo". - * If you want to use multiple options, it must be separated by commas. - * @param limit Limit value for items in list in collection. Default value is - * 100. - * @param offset Offset of start index in list of items of collection. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param filters Comma separated list of Item Filters to use to evaluate against - * the items in a collection - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @param servletContext Context of the servlet container. - * @return Return instance of collection. It can also return status code - * NOT_FOUND(404) if id of collection is incorrect or status code - * UNATHORIZED(401) if user has no permission to read collection. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). It is thrown by NOT_FOUND and - * UNATHORIZED status codes, too. 
- */ - @GET - @Path("/{collection_id}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.FilteredCollection getCollection(@PathParam("collection_id") String collection_id, - @QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("1000") Integer - limit, - @QueryParam("offset") @DefaultValue("0") Integer - offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @QueryParam("filters") @DefaultValue("is_item") - String filters, - @Context HttpHeaders headers, - @Context HttpServletRequest request, - @Context ServletContext servletContext) { - org.dspace.core.Context context = null; - FilteredCollection retColl = new org.dspace.rest.common.FilteredCollection(); - try { - context = createContext(); - - org.dspace.content.Collection collection = collectionService.findByIdOrLegacyId(context, collection_id); - if (authorizeService.authorizeActionBoolean(context, collection, org.dspace.core.Constants.READ)) { - writeStats(collection, UsageEvent.Action.VIEW, user_ip, - user_agent, xforwardedfor, headers, request, context); - retColl = new org.dspace.rest.common.FilteredCollection( - collection, servletContext, filters, expand, context, limit, offset); - } else { - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - context.complete(); - } catch (SQLException e) { - processException(e.getMessage(), context); - } catch (ContextException e) { - processException(String.format("Could not read collection %s. 
%s", collection_id, e.getMessage()), - context); - } finally { - processFinally(context); - } - return retColl; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/FilteredItemsResource.java b/dspace-rest/src/main/java/org/dspace/rest/FilteredItemsResource.java deleted file mode 100644 index 0f4331adc55d..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/FilteredItemsResource.java +++ /dev/null @@ -1,217 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.UUID; -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import org.dspace.content.Item; -import org.dspace.content.MetadataField; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.ItemService; -import org.dspace.content.service.MetadataFieldService; -import org.dspace.content.service.MetadataSchemaService; -import org.dspace.content.service.SiteService; -import org.dspace.rest.common.ItemFilter; -import org.dspace.rest.common.ItemFilterQuery; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.rest.filter.ItemFilterSet; -import org.dspace.services.ConfigurationService; -import 
org.dspace.services.factory.DSpaceServicesFactory; -import org.dspace.usage.UsageEvent; - -/* - * This class retrieves items by a constructed metadata query evaluated against a set of Item Filters. - * - * @author Terry Brady, Georgetown University - */ -@Path("/filtered-items") -public class FilteredItemsResource extends Resource { - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); - protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance() - .getMetadataSchemaService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected SiteService siteService = ContentServiceFactory.getInstance().getSiteService(); - protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(FilteredItemsResource.class); - - /** - * Return instance of collection with passed id. You can add more properties - * through expand parameter. - * - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "items", "license" and "logo". If you want - * to use multiple options, it must be separated by commas. - * @param limit Limit value for items in list in collection. Default value is - * 100. - * @param offset Offset of start index in list of items of collection. Default - * value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. 
The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param filters Comma separated list of Item Filters to use to evaluate against - * the items in a collection - * @param query_field List of metadata fields to evaluate in a metadata query. - * Each list value is used in conjunction with a query_op and query_field. - * @param query_op List of metadata operators to use in a metadata query. - * Each list value is used in conjunction with a query_field and query_field. - * @param query_val List of metadata values to evaluate in a metadata query. - * Each list value is used in conjunction with a query_value and query_op. - * @param collSel List of collections to query. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @param servletContext Context of the servlet container. - * @return Return instance of collection. It can also return status code - * NOT_FOUND(404) if id of collection is incorrect or status code - * UNATHORIZED(401) if user has no permission to read collection. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). It is thrown by NOT_FOUND and - * UNATHORIZED status codes, too. 
- */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.ItemFilter getItemQuery(@QueryParam("expand") String expand, - @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @QueryParam("filters") @DefaultValue("is_item,all_filters") - String filters, - @QueryParam("query_field[]") @DefaultValue("dc.title") - List query_field, - @QueryParam("query_op[]") @DefaultValue("exists") - List query_op, - @QueryParam("query_val[]") @DefaultValue("") List - query_val, - @QueryParam("collSel[]") @DefaultValue("") List - collSel, - @Context HttpHeaders headers, - @Context HttpServletRequest request, - @Context ServletContext servletContext) { - org.dspace.core.Context context = null; - ItemFilterSet itemFilterSet = new ItemFilterSet(filters, true); - ItemFilter result = itemFilterSet.getAllFiltersFilter(); - try { - context = createContext(); - - int index = Math.min(query_field.size(), Math.min(query_op.size(), query_val.size())); - List itemFilterQueries = new ArrayList(); - for (int i = 0; i < index; i++) { - itemFilterQueries.add(new ItemFilterQuery(query_field.get(i), query_op.get(i), query_val.get(i))); - } - - String regexClause = configurationService.getProperty("rest.regex-clause"); - if (regexClause == null) { - regexClause = ""; - } - - List uuids = getUuidsFromStrings(collSel); - List> listFieldList = getMetadataFieldsList(context, query_field); - - Iterator childItems = itemService - .findByMetadataQuery(context, listFieldList, query_op, query_val, uuids, regexClause, offset, limit); - - int count = itemFilterSet.processSaveItems(context, servletContext, childItems, true, expand); - writeStats(siteService.findSite(context), UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, - headers, request, 
context); - result.annotateQuery(query_field, query_op, query_val); - result.setUnfilteredItemCount(count); - context.complete(); - } catch (IOException e) { - processException(e.getMessage(), context); - } catch (SQLException e) { - processException(e.getMessage(), context); - } catch (AuthorizeException e) { - processException(e.getMessage(), context); - } catch (ContextException e) { - processException("Unauthorized filtered item query. " + e.getMessage(), context); - } finally { - processFinally(context); - } - return result; - } - - private List> getMetadataFieldsList(org.dspace.core.Context context, List query_field) - throws SQLException { - List> listFieldList = new ArrayList>(); - for (String s : query_field) { - ArrayList fields = new ArrayList(); - listFieldList.add(fields); - if (s.equals("*")) { - continue; - } - String schema = ""; - String element = ""; - String qualifier = null; - String[] parts = s.split("\\."); - if (parts.length > 0) { - schema = parts[0]; - } - if (parts.length > 1) { - element = parts[1]; - } - if (parts.length > 2) { - qualifier = parts[2]; - } - - if (Item.ANY.equals(qualifier)) { - for (MetadataField mf : metadataFieldService - .findFieldsByElementNameUnqualified(context, schema, element)) { - fields.add(mf); - } - } else { - MetadataField mf = metadataFieldService.findByElement(context, schema, element, qualifier); - if (mf != null) { - fields.add(mf); - } - } - } - return listFieldList; - } - - private List getUuidsFromStrings(List collSel) { - List uuids = new ArrayList(); - for (String s : collSel) { - try { - uuids.add(UUID.fromString(s)); - } catch (IllegalArgumentException e) { - log.warn("Invalid collection UUID: " + s); - } - } - return uuids; - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/FiltersResource.java b/dspace-rest/src/main/java/org/dspace/rest/FiltersResource.java deleted file mode 100644 index bff755f2de0b..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/FiltersResource.java 
+++ /dev/null @@ -1,60 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; - -import org.apache.logging.log4j.Logger; -import org.dspace.rest.common.ItemFilter; - -/** - * Class which provides read methods over the metadata registry. - * - * @author Terry Brady, Georgetown University - */ -@Path("/filters") -public class FiltersResource { - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(FiltersResource.class); - - /** - * Return all Use Case Item Filters in DSpace. - * - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of metadata schemas. - * @throws WebApplicationException It can be caused by creating context or while was problem - * with reading community from database(SQLException). 
- */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public ItemFilter[] getFilters(@QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading all Item Filters."); - return ItemFilter.getItemFilters(ItemFilter.ALL, false).toArray(new ItemFilter[0]); - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/HandleResource.java b/dspace-rest/src/main/java/org/dspace/rest/HandleResource.java deleted file mode 100644 index 51436a1c00e3..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/HandleResource.java +++ /dev/null @@ -1,109 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.sql.SQLException; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.DSpaceObjectService; -import org.dspace.core.Constants; -import org.dspace.handle.factory.HandleServiceFactory; -import org.dspace.handle.service.HandleService; -import org.dspace.rest.common.Collection; -import org.dspace.rest.common.Community; -import org.dspace.rest.common.DSpaceObject; -import org.dspace.rest.common.Item; -import 
org.dspace.rest.exceptions.ContextException; - -/** - * Created with IntelliJ IDEA. - * User: peterdietz - * Date: 10/7/13 - * Time: 1:54 PM - * To change this template use File | Settings | File Templates. - */ -@Path("/handle") -public class HandleResource extends Resource { - protected HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(HandleResource.class); - - @GET - @Path("/{prefix}/{suffix}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public org.dspace.rest.common.DSpaceObject getObject(@PathParam("prefix") String prefix, - @PathParam("suffix") String suffix, - @QueryParam("expand") String expand, - @javax.ws.rs.core.Context HttpHeaders headers) { - DSpaceObject dSpaceObject = new DSpaceObject(); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.DSpaceObject dso = handleService.resolveToObject(context, prefix + "/" + suffix); - - if (dso == null) { - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); - log.info("DSO Lookup by handle: [" + prefix + "] / [" + suffix + "] got result of: " + dSpaceObjectService - .getTypeText(dso) + "_" + dso.getID()); - - if (authorizeService.authorizeActionBoolean(context, dso, org.dspace.core.Constants.READ)) { - switch (dso.getType()) { - case Constants.COMMUNITY: - dSpaceObject = new Community((org.dspace.content.Community) dso, servletContext, expand, - context); - break; - case Constants.COLLECTION: - dSpaceObject = new Collection((org.dspace.content.Collection) dso, servletContext, expand, - context, null, null); - break; - case Constants.ITEM: - dSpaceObject = new Item((org.dspace.content.Item) dso, 
servletContext, expand, context); - break; - default: - dSpaceObject = new DSpaceObject(dso, servletContext); - break; - } - } else { - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - context.complete(); - - } catch (SQLException e) { - log.error(e.getMessage()); - throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR); - } catch (ContextException e) { - processException( - "Could not read handle(prefix=" + prefix + "), (suffix=" + suffix + ") ContextException. Message:" + e - .getMessage(), - context); - } finally { - processFinally(context); - } - - return dSpaceObject; - - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/HierarchyResource.java b/dspace-rest/src/main/java/org/dspace/rest/HierarchyResource.java deleted file mode 100644 index b2ffc559b0dc..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/HierarchyResource.java +++ /dev/null @@ -1,140 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.io.UnsupportedEncodingException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.Collection; -import org.dspace.content.Community; -import org.dspace.content.Site; -import org.dspace.content.factory.ContentServiceFactory; -import 
org.dspace.content.service.CommunityService; -import org.dspace.content.service.SiteService; -import org.dspace.rest.common.HierarchyCollection; -import org.dspace.rest.common.HierarchyCommunity; -import org.dspace.rest.common.HierarchySite; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; - - -/* - * This class retrieves the community hierarchy in an optimized format. - * - * @author Terry Brady, Georgetown University - */ -@Path("/hierarchy") -@Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) -public class HierarchyResource extends Resource { - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(HierarchyResource.class); - protected SiteService siteService = ContentServiceFactory.getInstance().getSiteService(); - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - - /** - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the collection as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of collection. 
It can also return status code - * NOT_FOUND(404) if id of collection is incorrect or status code - * @throws UnsupportedEncodingException The Character Encoding is not supported. - * @throws WebApplicationException It is thrown when was problem with database reading - * (SQLException) or problem with creating - * context(ContextException). It is thrown by NOT_FOUND and - * UNATHORIZED status codes, too. - */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public HierarchySite getHierarchy( - @QueryParam("userAgent") String user_agent, @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws UnsupportedEncodingException, WebApplicationException { - - org.dspace.core.Context context = null; - HierarchySite repo = new HierarchySite(); - - try { - context = createContext(); - - Site site = siteService.findSite(context); - repo.setId(site.getID().toString()); - repo.setName(site.getName()); - repo.setHandle(site.getHandle()); - List dspaceCommunities = communityService.findAllTop(context); - processCommunity(context, repo, dspaceCommunities); - } catch (Exception e) { - processException(e.getMessage(), context); - } finally { - if (context != null) { - try { - context.complete(); - } catch (SQLException e) { - log.error(e.getMessage() + " occurred while trying to close"); - } - } - } - return repo; - } - - - private void processCommunity(org.dspace.core.Context context, HierarchyCommunity parent, - List communities) throws SQLException { - if (communities == null) { - return; - } - if (communities.size() == 0) { - return; - } - List parentComms = new ArrayList(); - parent.setCommunities(parentComms); - for (Community comm : communities) { - if (!authorizeService.authorizeActionBoolean(context, comm, org.dspace.core.Constants.READ)) { - continue; - } - HierarchyCommunity mycomm = new HierarchyCommunity(comm.getID().toString(), comm.getName(), - comm.getHandle()); - 
parentComms.add(mycomm); - List colls = comm.getCollections(); - if (colls.size() > 0) { - List myColls = new ArrayList(); - mycomm.setCollections(myColls); - for (Collection coll : colls) { - if (!authorizeService.authorizeActionBoolean(context, coll, org.dspace.core.Constants.READ)) { - continue; - } - HierarchyCollection mycoll = new HierarchyCollection(coll.getID().toString(), coll.getName(), - coll.getHandle()); - myColls.add(mycoll); - } - } - processCommunity(context, mycomm, comm.getSubcommunities()); - } - - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/ItemsResource.java b/dspace-rest/src/main/java/org/dspace/rest/ItemsResource.java deleted file mode 100644 index 615aacac21cc..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/ItemsResource.java +++ /dev/null @@ -1,1007 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.io.IOException; -import java.io.InputStream; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Date; -import java.util.Iterator; -import java.util.List; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import 
org.dspace.authorize.service.AuthorizeService; -import org.dspace.authorize.service.ResourcePolicyService; -import org.dspace.content.Bundle; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.BitstreamFormatService; -import org.dspace.content.service.BitstreamService; -import org.dspace.content.service.BundleService; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.ItemService; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.GroupService; -import org.dspace.rest.common.Bitstream; -import org.dspace.rest.common.Item; -import org.dspace.rest.common.MetadataEntry; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.usage.UsageEvent; - -/** - * Class which provide all CRUD methods over items. - * - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -// Every DSpace class used without namespace is from package org.dspace.rest.common.*. Otherwise namespace is defined. 
-@SuppressWarnings("deprecation") -@Path("/items") -public class ItemsResource extends Resource { - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); - protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance() - .getBitstreamFormatService(); - protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); - protected ResourcePolicyService resourcePolicyService = AuthorizeServiceFactory.getInstance() - .getResourcePolicyService(); - protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService(); - - private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemsResource.class); - - /** - * Return item properties without metadata and bitstreams. You can add - * additional properties by parameter expand. - * - * @param itemId Id of item in DSpace. - * @param expand String which define, what additional properties will be in - * returned item. Options are separeted by commas and are: "all", - * "metadata", "parentCollection", "parentCollectionList", - * "parentCommunityList" and "bitstreams". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. 
- * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return If user is allowed to read item, it returns item. Otherwise is - * thrown WebApplicationException with response status - * UNAUTHORIZED(401) or NOT_FOUND(404) if was id incorrect. - * @throws WebApplicationException This exception can be throw by NOT_FOUND(bad id of item), - * UNAUTHORIZED, SQLException if wasproblem with reading from - * database and ContextException, if there was problem with - * creating context of DSpace. - */ - @GET - @Path("/{item_id}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Item getItem(@PathParam("item_id") String itemId, @QueryParam("expand") String expand, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - Item item = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.READ); - - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, request, - context); - - item = new Item(dspaceItem, servletContext, expand, context); - context.complete(); - log.trace("Item(id=" + itemId + ") was successfully read."); - - } catch (SQLException e) { - processException("Could not read item(id=" + itemId + "), SQLException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not read item(id=" + itemId + "), ContextException. 
Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - return item; - } - - /** - * It returns an array of items in DSpace. You can define how many items in - * list will be and from which index will start. Items in list are sorted by - * handle, not by id. - * - * @param expand String which define, what additional properties will be in - * returned item. Options are separeted by commas and are: "all", - * "metadata", "parentCollection", "parentCollectionList", - * "parentCommunityList" and "bitstreams". - * @param limit How many items in array will be. Default value is 100. - * @param offset On which index will array start. Default value is 0. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return Return array of items, on which has logged user into context - * permission. - * @throws WebApplicationException It can be thrown by SQLException, when was problem with - * reading items from database or ContextException, when was - * problem with creating context of DSpace. 
- */ - @GET - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Item[] getItems(@QueryParam("expand") String expand, @QueryParam("limit") @DefaultValue("100") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading items.(offset=" + offset + ",limit=" + limit + ")."); - org.dspace.core.Context context = null; - List items = null; - - try { - context = createContext(); - - Iterator dspaceItems = itemService.findAllUnfiltered(context); - items = new ArrayList(); - - if (!((limit != null) && (limit >= 0) && (offset != null) && (offset >= 0))) { - log.warn("Paging was badly set, using default values."); - limit = 100; - offset = 0; - } - - for (int i = 0; (dspaceItems.hasNext()) && (i < (limit + offset)); i++) { - org.dspace.content.Item dspaceItem = dspaceItems.next(); - if (i >= offset) { - if (itemService.isItemListedForUser(context, dspaceItem)) { - items.add(new Item(dspaceItem, servletContext, expand, context)); - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, - headers, request, context); - } - } - } - context.complete(); - } catch (SQLException e) { - processException("Something went wrong while reading items from database. Message: " + e, context); - } catch (ContextException e) { - processException("Something went wrong while reading items, ContextException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("Items were successfully read."); - return items.toArray(new Item[0]); - } - - /** - * Returns item metadata in list. - * - * @param itemId Id of item in DSpace. - * @param user_ip User's IP address. 
- * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return Return list of metadata fields if was everything ok. Otherwise it - * throw WebApplication exception with response code NOT_FOUND(404) - * or UNAUTHORIZED(401). - * @throws WebApplicationException It can be thrown by two exceptions: SQLException if was - * problem wtih reading item from database and ContextException, - * if was problem with creating context of DSpace. And can be - * thrown by NOT_FOUND and UNAUTHORIZED too. 
- */ - @GET - @Path("/{item_id}/metadata") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataEntry[] getItemMetadata(@PathParam("item_id") String itemId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading item(id=" + itemId + ") metadata."); - org.dspace.core.Context context = null; - List metadata = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.READ); - - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, request, - context); - - metadata = new org.dspace.rest.common.Item(dspaceItem, servletContext, "metadata", context).getMetadata(); - context.complete(); - } catch (SQLException e) { - processException("Could not read item(id=" + itemId + "), SQLException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not read item(id=" + itemId + "), ContextException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("Item(id=" + itemId + ") metadata were successfully read."); - return metadata.toArray(new MetadataEntry[0]); - } - - /** - * Return array of bitstreams in item. It can be paged. - * - * @param itemId Id of item in DSpace. - * @param limit How many items will be in array. - * @param offset On which index will start array. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. 
The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return Return paged array of bitstreams in item. - * @throws WebApplicationException It can be throw by NOT_FOUND, UNAUTHORIZED, SQLException if - * was problem with reading from database and ContextException - * if was problem with creating context of DSpace. - */ - @GET - @Path("/{item_id}/bitstreams") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Bitstream[] getItemBitstreams(@PathParam("item_id") String itemId, - @QueryParam("limit") @DefaultValue("20") Integer limit, - @QueryParam("offset") @DefaultValue("0") Integer offset, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading item(id=" + itemId + ") bitstreams.(offset=" + offset + ",limit=" + limit + ")"); - org.dspace.core.Context context = null; - List bitstreams = null; - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.READ); - - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, request, - context); - - List itemBitstreams = new Item(dspaceItem, servletContext, "bitstreams", context) - .getBitstreams(); - - if ((offset + limit) > (itemBitstreams.size() - offset)) { - bitstreams = itemBitstreams.subList(offset, itemBitstreams.size()); - } else { - bitstreams = itemBitstreams.subList(offset, offset + limit); - } - 
context.complete(); - } catch (SQLException e) { - processException("Could not read item(id=" + itemId + ") bitstreams, SQLExcpetion. Message: " + e, context); - } catch (ContextException e) { - processException( - "Could not read item(id=" + itemId + ") bitstreams, ContextException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.trace("Item(id=" + itemId + ") bitstreams were successfully read."); - return bitstreams.toArray(new Bitstream[0]); - } - - /** - * Adding metadata fields to item. If metadata key is in item, it will be - * added, NOT REPLACED! - * - * @param itemId Id of item in DSpace. - * @param metadata List of metadata fields, which will be added into item. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return It returns status code OK(200) if all was ok. UNAUTHORIZED(401) - * if user is not allowed to write to item. NOT_FOUND(404) if id of - * item is incorrect. - * @throws WebApplicationException It is throw by these exceptions: SQLException, if was problem - * with reading from database or writing to database. - * AuthorizeException, if was problem with authorization to item - * fields. ContextException, if was problem with creating - * context of DSpace. 
- */ - @POST - @Path("/{item_id}/metadata") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response addItemMetadata(@PathParam("item_id") String itemId, - List metadata, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Adding metadata to item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.WRITE); - - writeStats(dspaceItem, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, request, - context); - - for (MetadataEntry entry : metadata) { - // TODO Test with Java split - String data[] = mySplit(entry.getKey()); // Done by my split, because of java split was not function. - if ((data.length >= 2) && (data.length <= 3)) { - itemService.addMetadata(context, dspaceItem, data[0], data[1], data[2], entry.getLanguage(), - entry.getValue()); - } - } - context.complete(); - - } catch (SQLException e) { - processException("Could not write metadata to item(id=" + itemId + "), SQLException. Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not write metadata to item(id=" + itemId + "), ContextException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Metadata to item(id=" + itemId + ") were successfully added."); - return Response.status(Status.OK).build(); - } - - /** - * Create bitstream in item. - * - * @param name Btstream name to set. - * @param description Btstream description to set. - * @param groupId ResourcePolicy group (allowed to READ). - * @param year ResourcePolicy start date year. - * @param month ResourcePolicy start date month. - * @param day ResourcePolicy start date day. 
- * @param itemId Id of item in DSpace. - * @param inputStream Data of bitstream in inputStream. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return Returns bitstream with status code OK(200). If id of item is - * invalid , it returns status code NOT_FOUND(404). If user is not - * allowed to write to item, UNAUTHORIZED(401). - * @throws WebApplicationException It is thrown by these exceptions: SQLException, when was - * problem with reading/writing from/to database. - * AuthorizeException, when was problem with authorization to - * item and add bitstream to item. IOException, when was problem - * with creating file or reading from inpustream. - * ContextException. When was problem with creating context of - * DSpace. 
- */ - // TODO Add option to add bitstream by URI.(for very big files) - @POST - @Path("/{item_id}/bitstreams") - public Bitstream addItemBitstream(@PathParam("item_id") String itemId, InputStream inputStream, - @QueryParam("name") String name, @QueryParam("description") String description, - @QueryParam("groupId") String groupId, @QueryParam("year") Integer year, - @QueryParam("month") Integer month, - @QueryParam("day") Integer day, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Adding bitstream to item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - Bitstream bitstream = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.WRITE); - - writeStats(dspaceItem, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, request, - context); - - // Is better to add bitstream to ORIGINAL bundle or to item own? - log.trace("Creating bitstream in item."); - org.dspace.content.Bundle bundle = null; - org.dspace.content.Bitstream dspaceBitstream = null; - List bundles = itemService.getBundles(dspaceItem, org.dspace.core.Constants.CONTENT_BUNDLE_NAME); - - if (bundles != null && bundles.size() != 0) { - bundle = bundles.get(0); // There should be only one bundle ORIGINAL. 
- } - if (bundle == null) { - log.trace("Creating bundle in item."); - dspaceBitstream = itemService.createSingleBitstream(context, inputStream, dspaceItem); - } else { - log.trace("Getting bundle from item."); - dspaceBitstream = bitstreamService.create(context, bundle, inputStream); - } - - dspaceBitstream.setSource(context, "DSpace REST API"); - - // Set bitstream name and description - if (name != null) { - if (BitstreamResource.getMimeType(name) == null) { - dspaceBitstream.setFormat(context, bitstreamFormatService.findUnknown(context)); - } else { - bitstreamService.setFormat(context, dspaceBitstream, bitstreamFormatService - .findByMIMEType(context, BitstreamResource.getMimeType(name))); - } - - dspaceBitstream.setName(context, name); - } - if (description != null) { - dspaceBitstream.setDescription(context, description); - } - - // Create policy for bitstream - if (groupId != null) { - bundles = dspaceBitstream.getBundles(); - for (Bundle dspaceBundle : bundles) { - List bitstreamsPolicies = bundleService - .getBitstreamPolicies(context, dspaceBundle); - - // Remove default bitstream policies - List policiesToRemove = new ArrayList(); - for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies) { - if (policy.getdSpaceObject().getID().equals(dspaceBitstream.getID())) { - policiesToRemove.add(policy); - } - } - for (org.dspace.authorize.ResourcePolicy policy : policiesToRemove) { - bitstreamsPolicies.remove(policy); - } - - org.dspace.authorize.ResourcePolicy dspacePolicy = resourcePolicyService.create(context); - dspacePolicy.setAction(org.dspace.core.Constants.READ); - dspacePolicy.setGroup(groupService.findByIdOrLegacyId(context, groupId)); - dspacePolicy.setdSpaceObject(dspaceBitstream); - if ((year != null) || (month != null) || (day != null)) { - Date date = new Date(); - if (year != null) { - date.setYear(year - 1900); - } - if (month != null) { - date.setMonth(month - 1); - } - if (day != null) { - date.setDate(day); - } - 
date.setHours(0); - date.setMinutes(0); - date.setSeconds(0); - dspacePolicy.setStartDate(date); - } - - resourcePolicyService.update(context, dspacePolicy); - - bitstreamService.updateLastModified(context, dspaceBitstream); - } - } - - dspaceBitstream = bitstreamService.find(context, dspaceBitstream.getID()); - bitstream = new Bitstream(dspaceBitstream, servletContext, "", context); - - context.complete(); - - } catch (SQLException e) { - processException("Could not create bitstream in item(id=" + itemId + "), SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException("Could not create bitstream in item(id=" + itemId + "), AuthorizeException. Message: " + e, - context); - } catch (IOException e) { - processException("Could not create bitstream in item(id=" + itemId + "), IOException Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not create bitstream in item(id=" + itemId + "), ContextException Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Bitstream(id=" + bitstream.getUUID() + ") was successfully added into item(id=" + itemId + ")."); - return bitstream; - } - - /** - * Replace all metadata in item with new passed metadata. - * - * @param itemId Id of item in DSpace. - * @param metadata List of metadata fields, which will replace old metadata in - * item. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. 
- * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return It returns status code: OK(200). NOT_FOUND(404) if item was not - * found, UNAUTHORIZED(401) if user is not allowed to write to item. - * @throws WebApplicationException It is thrown by: SQLException, when was problem with database - * reading or writting, AuthorizeException when was problem with - * authorization to item and metadata fields. And - * ContextException, when was problem with creating context of - * DSpace. - */ - @PUT - @Path("/{item_id}/metadata") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response updateItemMetadata(@PathParam("item_id") String itemId, MetadataEntry[] metadata, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Updating metadata in item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.WRITE); - - writeStats(dspaceItem, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, request, - context); - - log.trace("Deleting original metadata from item."); - for (MetadataEntry entry : metadata) { - String data[] = mySplit(entry.getKey()); - if ((data.length >= 2) && (data.length <= 3)) { - itemService - .clearMetadata(context, dspaceItem, data[0], data[1], data[2], org.dspace.content.Item.ANY); - } - } - - log.trace("Adding new metadata to item."); - for (MetadataEntry entry : metadata) { - String data[] = mySplit(entry.getKey()); - if ((data.length >= 2) && (data.length <= 3)) { - itemService.addMetadata(context, dspaceItem, data[0], data[1], data[2], entry.getLanguage(), - 
entry.getValue()); - } - } - //Update the item to ensure that all the events get fired. - itemService.update(context, dspaceItem); - - context.complete(); - - } catch (SQLException e) { - processException("Could not update metadata in item(id=" + itemId + "), SQLException. Message: " + e, - context); - } catch (ContextException e) { - processException( - "Could not update metadata in item(id=" + itemId + "), ContextException. Message: " + e.getMessage(), - context); - } catch (AuthorizeException e) { - processException( - "Could not update metadata in item(id=" + itemId + "), AuthorizeException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Metadata of item(id=" + itemId + ") were successfully updated."); - return Response.status(Status.OK).build(); - } - - /** - * Delete item from DSpace. It delete bitstreams only from item bundle. - * - * @param itemId Id of item which will be deleted. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return It returns status code: OK(200). NOT_FOUND(404) if item was not - * found, UNAUTHORIZED(401) if user is not allowed to delete item - * metadata. - * @throws WebApplicationException It can be thrown by: SQLException, when was problem with - * database reading. 
AuthorizeException, when was problem with - * authorization to item.(read and delete) IOException, when was - * problem with deleting bitstream file. ContextException, when - * was problem with creating context of DSpace. - */ - @DELETE - @Path("/{item_id}") - public Response deleteItem(@PathParam("item_id") String itemId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.DELETE); - - writeStats(dspaceItem, UsageEvent.Action.REMOVE, user_ip, user_agent, xforwardedfor, headers, request, - context); - - log.trace("Deleting item."); - itemService.delete(context, dspaceItem); - context.complete(); - - } catch (SQLException e) { - processException("Could not delete item(id=" + itemId + "), SQLException. Message: " + e, context); - } catch (AuthorizeException e) { - processException("Could not delete item(id=" + itemId + "), AuthorizeException. Message: " + e, context); - throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR); - } catch (IOException e) { - processException("Could not delete item(id=" + itemId + "), IOException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not delete item(id=" + itemId + "), ContextException. Message: " + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Item(id=" + itemId + ") was successfully deleted."); - return Response.status(Status.OK).build(); - } - - /** - * Delete all item metadata. - * - * @param itemId Id of item in DSpace. - * @param user_ip User's IP address. 
- * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return It returns status code: OK(200). NOT_FOUND(404) if item was not - * found, UNAUTHORIZED(401) if user is not allowed to delete item - * metadata. - * @throws WebApplicationException Thrown by three exceptions. SQLException, when there was - * a problem reading item from database or editing metadata - * fields. AuthorizeException, when there was a problem with - * authorization to item. And ContextException, when there was a problem - * with creating a DSpace context. - */ - @DELETE - @Path("/{item_id}/metadata") - public Response deleteItemMetadata(@PathParam("item_id") String itemId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting metadata in item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Item dspaceItem = findItem(context, itemId, org.dspace.core.Constants.WRITE); - - writeStats(dspaceItem, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, request, - context); - - log.trace("Deleting metadata."); - // TODO Rewrite without deprecated object. Leave there only generated metadata. 
- - String valueAccessioned = itemService - .getMetadataFirstValue(dspaceItem, "dc", "date", "accessioned", org.dspace.content.Item.ANY); - String valueAvailable = itemService - .getMetadataFirstValue(dspaceItem, "dc", "date", "available", org.dspace.content.Item.ANY); - String valueURI = itemService - .getMetadataFirstValue(dspaceItem, "dc", "identifier", "uri", org.dspace.content.Item.ANY); - String valueProvenance = itemService - .getMetadataFirstValue(dspaceItem, "dc", "description", "provenance", org.dspace.content.Item.ANY); - - itemService.clearMetadata(context, dspaceItem, org.dspace.content.Item.ANY, org.dspace.content.Item.ANY, - org.dspace.content.Item.ANY, - org.dspace.content.Item.ANY); - - // Add their generated metadata - itemService.addMetadata(context, dspaceItem, "dc", "date", "accessioned", null, valueAccessioned); - itemService.addMetadata(context, dspaceItem, "dc", "date", "available", null, valueAvailable); - itemService.addMetadata(context, dspaceItem, "dc", "identifier", "uri", null, valueURI); - itemService.addMetadata(context, dspaceItem, "dc", "description", "provenance", null, valueProvenance); - - context.complete(); - } catch (SQLException e) { - processException("Could not delete item(id=" + itemId + "), SQLException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not delete item(id=" + itemId + "), ContextException. Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Item(id=" + itemId + ") metadata were successfully deleted."); - return Response.status(Status.OK).build(); - } - - /** - * Delete bitstream from item bundle. - * - * @param itemId Id of item in DSpace. - * @param bitstreamId Id of bitstream, which will be deleted from bundle. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the context. - * The value of the "rest-dspace-token" header must be set with passed - * token from login method. - * @param request Servlet's HTTP request object. - * @return Return status code OK(200) if is all ok. NOT_FOUND(404) if item - * or bitstream was not found. UNAUTHORIZED(401) if user is not - * allowed to delete bitstream. - * @throws WebApplicationException It is thrown, when: Was problem with edditting database, - * SQLException. Or problem with authorization to item, bundle - * or bitstream, AuthorizeException. When was problem with - * deleting file IOException. Or problem with creating context - * of DSpace, ContextException. 
- */ - @DELETE - @Path("/{item_id}/bitstreams/{bitstream_id}") - public Response deleteItemBitstream(@PathParam("item_id") String itemId, - @PathParam("bitstream_id") String bitstreamId, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting bitstream in item(id=" + itemId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - org.dspace.content.Item item = findItem(context, itemId, org.dspace.core.Constants.WRITE); - - org.dspace.content.Bitstream bitstream = bitstreamService.findByIdOrLegacyId(context, bitstreamId); - if (bitstream == null) { - context.abort(); - log.warn("Bitstream(id=" + bitstreamId + ") was not found."); - return Response.status(Status.NOT_FOUND).build(); - } else if (!authorizeService.authorizeActionBoolean(context, bitstream, org.dspace.core.Constants.DELETE)) { - context.abort(); - log.error("User(" + context.getCurrentUser() - .getEmail() + ") is not allowed to delete bitstream(id=" + bitstreamId + - ")."); - return Response.status(Status.UNAUTHORIZED).build(); - } - - writeStats(item, UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, headers, request, context); - writeStats(bitstream, UsageEvent.Action.REMOVE, user_ip, user_agent, xforwardedfor, headers, - request, context); - - log.trace("Deleting bitstream..."); - bitstreamService.delete(context, bitstream); - - context.complete(); - - } catch (SQLException e) { - processException("Could not delete bitstream(id=" + bitstreamId + "), SQLException. Message: " + e, - context); - } catch (AuthorizeException e) { - processException("Could not delete bitstream(id=" + bitstreamId + "), AuthorizeException. 
Message: " + e, - context); - } catch (IOException e) { - processException("Could not delete bitstream(id=" + bitstreamId + "), IOException. Message: " + e, context); - } catch (ContextException e) { - processException( - "Could not delete bitstream(id=" + bitstreamId + "), ContextException. Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - log.info("Bitstream(id=" + bitstreamId + ") from item(id=" + itemId + ") was successfuly deleted ."); - return Response.status(Status.OK).build(); - } - - /** - * Find items by one metadata field. - * - * @param metadataEntry Metadata field to search by. - * @param expand String which define, what additional properties will be in - * returned item. Options are separeted by commas and are: "all", - * "metadata", "parentCollection", "parentCollectionList", - * "parentCommunityList" and "bitstreams". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into context, - * header "rest-dspace-token" must be set to token value retrieved - * from the login method. - * @param request Servlet's HTTP request object. - * @return Return array of found items. - * @throws WebApplicationException Can be thrown: SQLException - problem with - * database reading. AuthorizeException - problem with - * authorization to item. IOException - problem with - * reading from metadata field. ContextException - - * problem with creating DSpace context. 
- */ - @POST - @Path("/find-by-metadata-field") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Item[] findItemsByMetadataField(MetadataEntry metadataEntry, @QueryParam("expand") String expand, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Looking for item with metadata(key=" + metadataEntry.getKey() + ",value=" + metadataEntry.getValue() - + ", language=" + metadataEntry.getLanguage() + ")."); - org.dspace.core.Context context = null; - - List items = new ArrayList(); - String[] metadata = mySplit(metadataEntry.getKey()); - - // Must used own style. - if ((metadata.length < 2) || (metadata.length > 3)) { - log.error("Finding failed, bad metadata key."); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - - try { - context = createContext(); - - Iterator itemIterator = itemService - .findByMetadataField(context, metadataEntry.getSchema(), - metadataEntry.getElement(), metadataEntry.getQualifier(), - metadataEntry.getValue()); - - while (itemIterator.hasNext()) { - org.dspace.content.Item dspaceItem = itemIterator.next(); - //Only return items that are available for the current user - if (itemService.isItemListedForUser(context, dspaceItem)) { - Item item = new Item(dspaceItem, servletContext, expand, context); - writeStats(dspaceItem, UsageEvent.Action.VIEW, user_ip, user_agent, xforwardedfor, headers, - request, context); - items.add(item); - } - } - - context.complete(); - } catch (SQLException e) { - processException("Something went wrong while finding item. 
SQLException, Message: " + e, context); - } catch (ContextException e) { - processException("Context error:" + e.getMessage(), context); - } catch (AuthorizeException e) { - processException("Authorize error:" + e.getMessage(), context); - } catch (IOException e) { - processException("IO error:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - if (items.size() == 0) { - log.info("Items not found."); - } else { - log.info("Items were found."); - } - - return items.toArray(new Item[0]); - } - - /** - * Find item from DSpace database. It is encapsulation of method - * org.dspace.content.Item.find with checking if item exist and if user - * logged into context has permission to do passed action. - * - * @param context Context of actual logged user. - * @param id Id of item in DSpace. - * @param action Constant from org.dspace.core.Constants. - * @return It returns DSpace item. - * @throws WebApplicationException Is thrown when item with passed id is not exists and if user - * has no permission to do passed action. 
- */ - private org.dspace.content.Item findItem(org.dspace.core.Context context, String id, int action) - throws WebApplicationException { - org.dspace.content.Item item = null; - try { - item = itemService.findByIdOrLegacyId(context, id); - - if (item == null) { - context.abort(); - log.warn("Item(id=" + id + ") was not found!"); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } else if (!authorizeService.authorizeActionBoolean(context, item, action)) { - context.abort(); - if (context.getCurrentUser() != null) { - log.error("User(" + context.getCurrentUser().getEmail() + ") has not permission to " - + getActionString(action) + " item!"); - } else { - log.error("User(anonymous) has not permission to " + getActionString(action) + " item!"); - } - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - } catch (SQLException e) { - processException("Something get wrong while finding item(id=" + id + "). SQLException, Message: " + e, - context); - } - return item; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/MetadataRegistryResource.java b/dspace-rest/src/main/java/org/dspace/rest/MetadataRegistryResource.java deleted file mode 100644 index 79e655e63de6..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/MetadataRegistryResource.java +++ /dev/null @@ -1,738 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.io.IOException; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import 
javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.AuthorizeException; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.NonUniqueMetadataException; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.MetadataFieldService; -import org.dspace.content.service.MetadataSchemaService; -import org.dspace.content.service.SiteService; -import org.dspace.rest.common.MetadataField; -import org.dspace.rest.common.MetadataSchema; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.usage.UsageEvent; - -/** - * Class which provides read methods over the metadata registry. - * - * @author Terry Brady, Georgetown University - * - * GET /registries/schema - Return the list of schemas in the registry - * GET /registries/schema/{schema_prefix} - Returns the specified schema - * GET /registries/schema/{schema_prefix}/metadata-fields/{element} - Returns the metadata field within a schema - * with an unqualified element name - * GET /registries/schema/{schema_prefix}/metadata-fields/{element}/{qualifier} - Returns the metadata field - * within a schema with a qualified element name - * POST /registries/schema/ - Add a schema to the schema registry - * POST /registries/schema/{schema_prefix}/metadata-fields - Add a metadata field to the specified schema - * GET /registries/metadata-fields/{field_id} - Return the specified metadata field - * PUT /registries/metadata-fields/{field_id} - Update the specified metadata field - * DELETE /registries/metadata-fields/{field_id} - Delete the specified metadata field from the metadata field registry - * DELETE 
/registries/schema/{schema_id} - Delete the specified schema from the schema registry - * - * Note: intentionally not providing since there is no date to update other than the namespace - * PUT /registries/schema/{schema_id} - */ -@Path("/registries") -public class MetadataRegistryResource extends Resource { - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); - protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance() - .getMetadataSchemaService(); - protected SiteService siteService = ContentServiceFactory.getInstance().getSiteService(); - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataRegistryResource.class); - - /** - * Return all metadata registry items in DSpace. - * - * @param expand String in which is what you want to add to returned instance - * of metadata schema. Options are: "all", "fields". Default value "fields". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the metadata schema as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return array of metadata schemas. - * @throws WebApplicationException It can be caused by creating context or while was problem - * with reading schema from database(SQLException). 
- */ - @GET - @Path("/schema") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataSchema[] getSchemas(@QueryParam("expand") @DefaultValue("fields") String expand, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading all metadata schemas."); - org.dspace.core.Context context = null; - ArrayList metadataSchemas = null; - - try { - context = createContext(); - - List schemas = metadataSchemaService.findAll(context); - metadataSchemas = new ArrayList(); - for (org.dspace.content.MetadataSchema schema : schemas) { - metadataSchemas.add(new MetadataSchema(schema, expand, context)); - } - - context.complete(); - } catch (SQLException e) { - processException("Could not read metadata schemas, SQLException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not read metadata schemas, ContextException. Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("All metadata schemas successfully read."); - return metadataSchemas.toArray(new MetadataSchema[0]); - } - - /** - * Returns metadata schema with basic properties. If you want more, use expand - * parameter or method for metadata fields. - * - * @param schemaPrefix Prefix for schema in DSpace. - * @param expand String in which is what you want to add to returned instance - * of metadata schema. Options are: "all", "fields". Default value "fields". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. 
The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the metadata schema as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of org.dspace.rest.common.MetadataSchema. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading. Also if id/prefix of schema is incorrect - * or logged user into context has no permission to read. - */ - @GET - @Path("/schema/{schema_prefix}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataSchema getSchema(@PathParam("schema_prefix") String schemaPrefix, - @QueryParam("expand") @DefaultValue("fields") String expand, - @QueryParam("userIP") String user_ip, @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading metadata schemas."); - org.dspace.core.Context context = null; - MetadataSchema metadataSchema = null; - - try { - context = createContext(); - - org.dspace.content.MetadataSchema schema = metadataSchemaService.find(context, schemaPrefix); - metadataSchema = new MetadataSchema(schema, expand, context); - if (schema == null) { - processException(String.format("Schema not found for index %s", schemaPrefix), context); - } - - context.complete(); - } catch (SQLException e) { - processException("Could not read metadata schema, SQLException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not read metadata schema, ContextException. 
Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("Metadata schemas successfully read."); - return metadataSchema; - } - - /** - * Returns metadata field with basic properties. - * - * @param schemaPrefix Prefix for schema in DSpace. - * @param element Unqualified element name for field in the metadata registry. - * @param expand String in which is what you want to add to returned instance - * of the metadata field. Options are: "all", "parentSchema". Default value "". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of org.dspace.rest.common.MetadataField. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading. Also if id of field is incorrect - * or logged user into context has no permission to read. 
- */ - @GET - @Path("/schema/{schema_prefix}/metadata-fields/{element}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataField getMetadataFieldUnqualified(@PathParam("schema_prefix") String schemaPrefix, - @PathParam("element") String element, - @QueryParam("expand") String expand, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - return getMetadataFieldQualified(schemaPrefix, element, "", expand, user_ip, user_agent, xforwardedfor, headers, - request); - } - - /** - * Returns metadata field with basic properties. - * - * @param schemaPrefix Prefix for schema in DSpace. - * @param element Element name for field in the metadata registry. - * @param qualifier Element name qualifier for field in the metadata registry. - * @param expand String in which is what you want to add to returned instance - * of the metadata field. Options are: "all", "parentSchema". Default value "". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of org.dspace.rest.common.MetadataField. 
- * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading. Also if id of field is incorrect - * or logged user into context has no permission to read. - */ - @GET - @Path("/schema/{schema_prefix}/metadata-fields/{element}/{qualifier}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataField getMetadataFieldQualified(@PathParam("schema_prefix") String schemaPrefix, - @PathParam("element") String element, - @PathParam("qualifier") @DefaultValue("") String qualifier, - @QueryParam("expand") String expand, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading metadata field."); - org.dspace.core.Context context = null; - MetadataField metadataField = null; - - try { - context = createContext(); - - org.dspace.content.MetadataSchema schema = metadataSchemaService.find(context, schemaPrefix); - - if (schema == null) { - log.error(String.format("Schema not found for prefix %s", schemaPrefix)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - - org.dspace.content.MetadataField field = metadataFieldService - .findByElement(context, schema, element, qualifier); - if (field == null) { - log.error(String.format("Field %s.%s.%s not found", schemaPrefix, element, qualifier)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - metadataField = new MetadataField(schema, field, expand, context); - - context.complete(); - } catch (SQLException e) { - processException("Could not read metadata field, SQLException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not read metadata field, ContextException. 
Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("Metadata field successfully read."); - return metadataField; - } - - /** - * Returns metadata field with basic properties. - * - * @param fieldId Id of metadata field in DSpace. - * @param expand String in which is what you want to add to returned instance - * of the metadata field. Options are: "all", "parentSchema". Default value "parentSchema". - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the community as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return instance of org.dspace.rest.common.MetadataField. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading. Also if id of field is incorrect - * or logged user into context has no permission to read. 
- */ - @GET - @Path("/metadata-fields/{field_id}") - @Produces( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataField getMetadataField(@PathParam("field_id") Integer fieldId, - @QueryParam("expand") @DefaultValue("parentSchema") String expand, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Reading metadata field."); - org.dspace.core.Context context = null; - MetadataField metadataField = null; - - try { - context = createContext(); - - org.dspace.content.MetadataField field = metadataFieldService.find(context, fieldId); - if (field == null) { - log.error(String.format("Metadata Field %d not found", fieldId)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - org.dspace.content.MetadataSchema schema = field.getMetadataSchema(); - if (schema == null) { - log.error(String.format("Parent Schema not found for Metadata Field %d not found", fieldId)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - metadataField = new MetadataField(schema, field, expand, context); - - context.complete(); - } catch (SQLException e) { - processException("Could not read metadata field, SQLException. Message:" + e, context); - } catch (ContextException e) { - processException("Could not read metadata field, ContextException. Message:" + e.getMessage(), context); - } finally { - processFinally(context); - } - - log.trace("Metadata field successfully read."); - return metadataField; - } - - /** - * Create schema in the schema registry. Creating a schema is restricted to admin users. - * - * @param schema Schema that will be added to the metadata registry. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). 
- * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the schema as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response 200 if was everything all right. Otherwise 400 - * when id of community was incorrect or 401 if was problem with - * permission to write into collection. - * Returns the schema (schemaId), if was all ok. - * @throws WebApplicationException It can be thrown by SQLException, AuthorizeException and - * ContextException. - */ - @POST - @Path("/schema") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataSchema createSchema(MetadataSchema schema, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Creating a schema."); - org.dspace.core.Context context = null; - MetadataSchema retSchema = null; - - try { - context = createContext(); - - if (!authorizeService.isAdmin(context)) { - context.abort(); - String user = "anonymous"; - if (context.getCurrentUser() != null) { - user = context.getCurrentUser().getEmail(); - } - log.error("User(" + user + ") does not have permission to create a metadata schema!"); - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - log.debug(String.format("Admin user creating schema with namespace %s and prefix %s", schema.getNamespace(), - schema.getPrefix())); - - org.dspace.content.MetadataSchema 
dspaceSchema = metadataSchemaService - .create(context, schema.getPrefix(), schema.getNamespace()); - log.debug("Creating return object."); - retSchema = new MetadataSchema(dspaceSchema, "", context); - - writeStats(siteService.findSite(context), UsageEvent.Action.CREATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - context.complete(); - log.info("Schema created" + retSchema.getPrefix()); - - } catch (SQLException e) { - processException("Could not create new metadata schema, SQLException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not create new metadata schema, ContextException. Message: " + e.getMessage(), - context); - } catch (AuthorizeException e) { - processException("Could not create new metadata schema, AuthorizeException. Message: " + e.getMessage(), - context); - } catch (NonUniqueMetadataException e) { - processException( - "Could not create new metadata schema, NonUniqueMetadataException. Message: " + e.getMessage(), - context); - } catch (Exception e) { - processException("Could not create new metadata schema, Exception. Class: " + e.getClass(), context); - } finally { - processFinally(context); - } - - return retSchema; - } - - - /** - * Create a new metadata field within a schema. - * Creating a metadata field is restricted to admin users. - * - * @param schemaPrefix Prefix for schema in DSpace. - * @param field Field that will be added to the metadata registry for a schema. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. 
- * @param headers If you want to access the schema as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response 200 if was everything all right. Otherwise 400 - * when id of community was incorrect or 401 if was problem with - * permission to write into collection. - * Returns the field (with fieldId), if was all ok. - * @throws WebApplicationException It can be thrown by SQLException, AuthorizeException and - * ContextException. - */ - @POST - @Path("/schema/{schema_prefix}/metadata-fields") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public MetadataField createMetadataField(@PathParam("schema_prefix") String schemaPrefix, - MetadataField field, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info(String.format("Creating metadataField within schema %s.", schemaPrefix)); - org.dspace.core.Context context = null; - MetadataField retField = null; - - try { - context = createContext(); - - if (!authorizeService.isAdmin(context)) { - context.abort(); - String user = "anonymous"; - if (context.getCurrentUser() != null) { - user = context.getCurrentUser().getEmail(); - } - log.error("User(" + user + ") does not have permission to create a metadata field!"); - throw new WebApplicationException(Response.Status.UNAUTHORIZED); - } - - org.dspace.content.MetadataSchema schema = metadataSchemaService.find(context, schemaPrefix); - if (schema == null) { - log.error(String.format("Schema not found for prefix %s", schemaPrefix)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - org.dspace.content.MetadataField dspaceField = metadataFieldService - .create(context, 
schema, field.getElement(), field.getQualifier(), field.getDescription()); - writeStats(siteService.findSite(context), UsageEvent.Action.CREATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - retField = new MetadataField(schema, dspaceField, "", context); - context.complete(); - log.info("Metadata field created within schema" + retField.getName()); - } catch (SQLException e) { - processException("Could not create new metadata field, SQLException. Message: " + e, context); - } catch (ContextException e) { - processException("Could not create new metadata field, ContextException. Message: " + e.getMessage(), - context); - } catch (AuthorizeException e) { - processException("Could not create new metadata field, AuthorizeException. Message: " + e.getMessage(), - context); - } catch (NonUniqueMetadataException e) { - processException( - "Could not create new metadata field, NonUniqueMetadataException. Message: " + e.getMessage(), context); - } catch (Exception e) { - processException("Could not create new metadata field, Exception. Message: " + e.getMessage(), context); - } finally { - processFinally(context); - } - - return retField; - } - - //@PUT - //@Path("/schema/{schema_prefix}") - //Assumption - there are no meaningful fields to update for a schema - - /** - * Update metadata field. Replace all information about community except the id and the containing schema. - * - * @param fieldId Id of the field in the DSpace metdata registry. - * @param field Instance of the metadata field which will replace actual metadata field in - * DSpace. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. 
The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the metadata field as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Response 200 if was all ok. Otherwise 400 if was id incorrect or - * 401 if logged user has no permission to update the metadata field. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or writing. Or problem with writing to - * community caused by authorization. - */ - @PUT - @Path("/metadata-fields/{field_id}") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response updateMetadataField(@PathParam("field_id") Integer fieldId, MetadataField field, - @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, @Context HttpHeaders headers, - @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Updating metadata field(id=" + fieldId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.MetadataField dspaceField = metadataFieldService.find(context, fieldId); - if (field == null) { - log.error(String.format("Metadata Field %d not found", fieldId)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - - writeStats(siteService.findSite(context), UsageEvent.Action.UPDATE, user_ip, user_agent, xforwardedfor, - headers, request, context); - - dspaceField.setElement(field.getElement()); - dspaceField.setQualifier(field.getQualifier()); - dspaceField.setScopeNote(field.getDescription()); - metadataFieldService.update(context, 
dspaceField); - - context.complete(); - - } catch (SQLException e) { - processException("Could not update metadata field(id=" + fieldId + "), AuthorizeException. Message:" + e, - context); - } catch (ContextException e) { - processException("Could not update metadata field(id=" + fieldId + "), ContextException Message:" + e, - context); - } catch (AuthorizeException e) { - processException("Could not update metadata field(id=" + fieldId + "), AuthorizeException. Message:" + e, - context); - } catch (NonUniqueMetadataException e) { - processException( - "Could not update metadata field(id=" + fieldId + "), NonUniqueMetadataException. Message:" + e, - context); - } catch (IOException e) { - processException("Could not update metadata field(id=" + fieldId + "), IOException. Message:" + e, context); - } finally { - processFinally(context); - } - - log.info("Metadata Field(id=" + fieldId + ") has been successfully updated."); - return Response.ok().build(); - } - - /** - * Delete metadata field from the DSpace metadata registry - * - * @param fieldId Id of the metadata field in DSpace. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the metadata field as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response code OK(200) if was everything all right. - * Otherwise return NOT_FOUND(404) if was id of metadata field is incorrect. 
- * Or (UNAUTHORIZED)401 if was problem with permission to metadata field. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or deleting. Or problem with deleting - * metadata field caused by IOException or authorization. - */ - @DELETE - @Path("/metadata-fields/{field_id}") - public Response deleteMetadataField(@PathParam("field_id") Integer fieldId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting metadata field(id=" + fieldId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.MetadataField dspaceField = metadataFieldService.find(context, fieldId); - if (dspaceField == null) { - log.error(String.format("Metadata Field %d not found", fieldId)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - writeStats(siteService.findSite(context), UsageEvent.Action.DELETE, user_ip, user_agent, xforwardedfor, - headers, - request, context); - - metadataFieldService.delete(context, dspaceField); - context.complete(); - - } catch (SQLException e) { - processException("Could not delete metadata field(id=" + fieldId + "), SQLException. Message:" + e, - context); - } catch (AuthorizeException e) { - processException("Could not delete metadata field(id=" + fieldId + "), AuthorizeException. Message:" + e, - context); - } catch (ContextException e) { - processException( - "Could not delete metadata field(id=" + fieldId + "), ContextException. 
Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - - log.info("Metadata field(id=" + fieldId + ") was successfully deleted."); - return Response.status(Response.Status.OK).build(); - } - - /** - * Delete metadata schema from the DSpace metadata registry - * - * @param schemaId Id of the metadata schema in DSpace. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the metadata schema as the user logged into the - * context. The value of the "rest-dspace-token" header must be set - * to the token received from the login method response. - * @param request Servlet's HTTP request object. - * @return Return response code OK(200) if was everything all right. - * Otherwise return NOT_FOUND(404) if was id of metadata schema is incorrect. - * Or (UNAUTHORIZED)401 if was problem with permission to metadata schema. - * @throws WebApplicationException Thrown if there was a problem with creating context or problem - * with database reading or deleting. Or problem with deleting - * metadata schema caused by IOException or authorization. 
- */ - @DELETE - @Path("/schema/{schema_id}") - public Response deleteSchema(@PathParam("schema_id") Integer schemaId, @QueryParam("userIP") String user_ip, - @QueryParam("userAgent") String user_agent, - @QueryParam("xforwardedfor") String xforwardedfor, - @Context HttpHeaders headers, @Context HttpServletRequest request) - throws WebApplicationException { - - log.info("Deleting metadata schema(id=" + schemaId + ")."); - org.dspace.core.Context context = null; - - try { - context = createContext(); - - org.dspace.content.MetadataSchema dspaceSchema = metadataSchemaService.find(context, schemaId); - if (dspaceSchema == null) { - log.error(String.format("Metadata Schema %d not found", schemaId)); - throw new WebApplicationException(Response.Status.NOT_FOUND); - } - writeStats(siteService.findSite(context), UsageEvent.Action.DELETE, user_ip, user_agent, xforwardedfor, - headers, - request, context); - - metadataSchemaService.delete(context, dspaceSchema); - context.complete(); - - } catch (SQLException e) { - processException("Could not delete metadata schema(id=" + schemaId + "), SQLException. Message:" + e, - context); - } catch (AuthorizeException e) { - processException("Could not delete metadata schema(id=" + schemaId + "), AuthorizeException. Message:" + e, - context); - } catch (ContextException e) { - processException( - "Could not delete metadata schema(id=" + schemaId + "), ContextException. 
Message:" + e.getMessage(), - context); - } finally { - processFinally(context); - } - - - log.info("Metadata schema(id=" + schemaId + ") was successfully deleted."); - return Response.status(Response.Status.OK).build(); - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/Resource.java b/dspace-rest/src/main/java/org/dspace/rest/Resource.java deleted file mode 100644 index 7a7624fef03e..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/Resource.java +++ /dev/null @@ -1,212 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collection; -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServletRequest; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.Response; - -import org.apache.logging.log4j.Logger; -import org.dspace.content.DSpaceObject; -import org.dspace.core.Context; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.services.factory.DSpaceServicesFactory; -import org.dspace.usage.UsageEvent; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.authority.SimpleGrantedAuthority; -import org.springframework.security.core.context.SecurityContextHolder; - -/** - * Superclass of all resource classes in REST API. It has methods for creating - * context, write statistics, processsing exceptions, splitting a key of - * metadata, string representation of action and method for getting the logged - * in user from the token in request header. 
- * - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -public class Resource { - - @javax.ws.rs.core.Context - public ServletContext servletContext; - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Resource.class); - - private static final boolean writeStatistics; - - static { - writeStatistics = DSpaceServicesFactory.getInstance().getConfigurationService() - .getBooleanProperty("rest.stats", false); - } - - /** - * Create context to work with DSpace database. It can create context - * with or without a logged in user (retrieved from SecurityContextHolder). Throws - * WebApplicationException caused by: SQLException if there was a problem - * with reading from database. Throws AuthorizeException if there was - * a problem with authorization to read from the database. Throws Exception - * if there was a problem creating context. - * - * @return Newly created context with the logged in user unless the specified user was null. - * If user is null, create the context without a logged in user. - * @throws ContextException Thrown in case of a problem creating context. Can be caused by - * SQLException error in creating context or finding the user to - * log in. Can be caused by AuthorizeException if there was a - * problem authorizing the found user. - * @throws SQLException An exception that provides information on a database access error or other errors. - */ - protected static org.dspace.core.Context createContext() throws ContextException, SQLException { - org.dspace.core.Context context = new org.dspace.core.Context(); - //context.getDBConnection().setAutoCommit(false); // Disable autocommit. 
- - Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); - if (authentication != null) { - Collection specialGroups = (Collection) authentication - .getAuthorities(); - for (SimpleGrantedAuthority grantedAuthority : specialGroups) { - context.setSpecialGroup(EPersonServiceFactory.getInstance().getGroupService() - .findByName(context, grantedAuthority.getAuthority()) - .getID()); - } - context.setCurrentUser( - EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, authentication.getName())); - } - - return context; - } - - /** - * Records a statistics event about an object used via REST API. - * - * @param dspaceObject DSpace object on which a request was performed. - * @param action Action that was performed. - * @param user_ip User's IP address. - * @param user_agent User agent string (specifies browser used and its version). - * @param xforwardedfor When accessed via a reverse proxy, the application sees the proxy's IP as the - * source of the request. The proxy may be configured to add the - * "X-Forwarded-For" HTTP header containing the original IP of the client - * so that the reverse-proxied application can get the client's IP. - * @param headers If you want to access the item as the user logged into the - * context. The header "rest-dspace-token" with the token passed - * from the login method must be set. - * @param request Servlet's HTTP request object. - * @param context Context which must be aborted. 
- */ - protected void writeStats(DSpaceObject dspaceObject, UsageEvent.Action action, - String user_ip, String user_agent, String xforwardedfor, HttpHeaders headers, - HttpServletRequest request, Context context) { - if (!writeStatistics) { - return; - } - - if ((user_ip == null) || (user_ip.length() == 0)) { - DSpaceServicesFactory.getInstance().getEventService() - .fireEvent(new UsageEvent(action, request, context, dspaceObject)); - } else { - DSpaceServicesFactory.getInstance().getEventService().fireEvent( - new UsageEvent(action, user_ip, user_agent, xforwardedfor, context, dspaceObject)); - } - - log.debug("fired event"); - } - - /** - * Process exception, print message to logger error stream and abort DSpace - * context. - * - * @param message Message, which will be printed to error stream. - * @param context Context which must be aborted. - * @throws WebApplicationException This exception is throw for user of REST api. - */ - protected static void processException(String message, org.dspace.core.Context context) - throws WebApplicationException { - if ((context != null) && (context.isValid())) { - context.abort(); - } - log.error(message); - throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR); - } - - /** - * Process finally statement. It will print message to logger error stream - * and abort DSpace context, if was not properly ended. - * - * @param context Context which must be aborted. - * @throws WebApplicationException This exception is thrown for user of REST API. - */ - protected void processFinally(org.dspace.core.Context context) throws WebApplicationException { - if ((context != null) && (context.isValid())) { - context.abort(); - log.error("Something get wrong. Aborting context in finally statement."); - throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR); - } - } - - /** - * Split string with regex ".". - * - * @param key String which will be splitted. - * @return String array filed with separated string. 
- */ - protected String[] mySplit(String key) { - ArrayList list = new ArrayList(); - int prev = 0; - for (int i = 0; i < key.length(); i++) { - if (key.charAt(i) == '.') { - list.add(key.substring(prev, i)); - prev = i + 1; - } else if (i + 1 == key.length()) { - list.add(key.substring(prev, i + 1)); - } - } - - if (list.size() == 2) { - list.add(null); - } - - return list.toArray(new String[0]); - } - - /** - * Return string representation of values - * org.dspace.core.Constants.{READ,WRITE,DELETE}. - * - * @param action Constant from org.dspace.core.Constants.* - * @return String representation. read or write or delete. - */ - protected String getActionString(int action) { - String actionStr; - switch (action) { - case org.dspace.core.Constants.READ: - actionStr = "read"; - break; - case org.dspace.core.Constants.WRITE: - actionStr = "write"; - break; - case org.dspace.core.Constants.DELETE: - actionStr = "delete"; - break; - case org.dspace.core.Constants.REMOVE: - actionStr = "remove"; - break; - case org.dspace.core.Constants.ADD: - actionStr = "add"; - break; - default: - actionStr = "(?action?)"; - break; - } - return actionStr; - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/RestIndex.java b/dspace-rest/src/main/java/org/dspace/rest/RestIndex.java deleted file mode 100644 index 26b1150229e8..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/RestIndex.java +++ /dev/null @@ -1,301 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.sql.SQLException; -import java.util.Iterator; -import javax.servlet.ServletContext; -import javax.ws.rs.Consumes; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.Path; -import 
javax.ws.rs.Produces; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; - -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; -import org.dspace.authenticate.AuthenticationMethod; -import org.dspace.authenticate.ShibAuthentication; -import org.dspace.authenticate.factory.AuthenticateServiceFactory; -import org.dspace.authenticate.service.AuthenticationService; -import org.dspace.eperson.EPerson; -import org.dspace.eperson.factory.EPersonServiceFactory; -import org.dspace.eperson.service.EPersonService; -import org.dspace.rest.common.Status; -import org.dspace.rest.exceptions.ContextException; -import org.dspace.utils.DSpace; - -/** - * Root of RESTful api. It provides login and logout. Also have method for - * printing every method which is provides by RESTful api. - * - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -@Path("/") -public class RestIndex { - protected EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService(); - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RestIndex.class); - - /** - * Return html page with information about REST api. It contains methods all - * methods provide by REST api. - * - * @param servletContext Context of the servlet container. - * @return HTML page which has information about all methods of REST API. - */ - @GET - @Produces(MediaType.TEXT_HTML) - public String sayHtmlHello(@Context ServletContext servletContext) { - // TODO Better graphics, add arguments to all methods. (limit, offset, item and so on) - return "DSpace REST - index" + - "" - + "

    DSpace REST API (Deprecated)

    " + - "This REST API is deprecated and will be removed in v8." + - " Please use the new Server API webapp instead.
    " + - "Server path: " + servletContext.getContextPath() + - "

    Index

    " + - "
      " + - "
    • GET / - Return this page.
    • " + - "
    • GET /test - Return the string \"REST api is running\" for testing purposes.
    • " + - "
    • POST /login - Method for logging into the DSpace RESTful API. You must post the parameters \"email\"" + - " and \"password\". Example: \"email=test@dspace&password=pass\". Returns a JSESSIONID cookie which can " + - "be used for future authenticated requests.
    • " + - "
    • POST /logout - Method for logging out of the DSpace RESTful API. The request must include the " + - "\"rest-dspace-token\" token
    • header." + - "
    " + - "

    Communities

    " + - "
      " + - "
    • GET /communities - Return an array of all communities in DSpace.
    • " + - "
    • GET /communities/top-communities - Returns an array of all top-leve communities in DSpace.
    • " + - "
    • GET /communities/{communityId} - Returns a community with the specified ID.
    • " + - "
    • GET /communities/{communityId}/collections - Returns an array of collections of the specified " + - "community.
    • " + - "
    • GET /communities/{communityId}/communities - Returns an array of subcommunities of the specified " + - "community.
    • " + - "
    • POST /communities - Create a new top-level community. You must post a community.
    • " + - "
    • POST /communities/{communityId}/collections - Create a new collection in the specified community. " + - "You must post a collection.
    • " + - "
    • POST /communities/{communityId}/communities - Create a new subcommunity in the specified community. " + - "You must post a community.
    • " + - "
    • PUT /communities/{communityId} - Update the specified community.
    • " + - "
    • DELETE /communities/{communityId} - Delete the specified community.
    • " + - "
    • DELETE /communities/{communityId}/collections/{collectionId} - Delete the specified collection in " + - "the specified community.
    • " + - "
    • DELETE /communities/{communityId}/communities/{communityId2} - Delete the specified subcommunity " + - "(communityId2) in the specified community (communityId).
    • " + - "
    " + - "

    Collections

    " + - "
      " + - "
    • GET /collections - Return all DSpace collections in array.
    • " + - "
    • GET /collections/{collectionId} - Return a collection with the specified ID.
    • " + - "
    • GET /collections/{collectionId}/items - Return all items of the specified collection.
    • " + - "
    • POST /collections/{collectionId}/items - Create an item in the specified collection. You must post " + - "an item.
    • " + - "
    • POST /collections/find-collection - Find a collection by name.
    • " + - "
    • PUT /collections/{collectionId}
    • - Update the specified collection. You must post a collection." + - "
    • DELETE /collections/{collectionId} - Delete the specified collection from DSpace.
    • " + - "
    • DELETE /collections/{collectionId}/items/{itemId} - Delete the specified item (itemId) in the " + - "specified collection (collectionId).
    • " + - "
    " + - "

    Items

    " + - "
      " + - "
    • GET /items - Return a list of items.
    • " + - "
    • GET /items/{item id} - Return the specified item.
    • " + - "
    • GET /items/{item id}/metadata - Return metadata of the specified item.
    • " + - "
    • GET /items/{item id}/bitstreams - Return bitstreams of the specified item.
    • " + - "
    • POST /items/find-by-metadata-field - Find items by the specified metadata value.
    • " + - "
    • POST /items/{item id}/metadata - Add metadata to the specified item.
    • " + - "
    • POST /items/{item id}/bitstreams - Add a bitstream to the specified item.
    • " + - "
    • PUT /items/{item id}/metadata - Update metadata in the specified item.
    • " + - "
    • DELETE /items/{item id} - Delete the specified item.
    • " + - "
    • DELETE /items/{item id}/metadata - Clear metadata of the specified item.
    • " + - "
    • DELETE /items/{item id}/bitstreams/{bitstream id} - Delete the specified bitstream of the specified " + - "item.
    • " + - "
    " + - "

    Bitstreams

    " + - "
      " + - "
    • GET /bitstreams - Return all bitstreams in DSpace.
    • " + - "
    • GET /bitstreams/{bitstream id} - Return the specified bitstream.
    • " + - "
    • GET /bitstreams/{bitstream id}/policy - Return policies of the specified bitstream.
    • " + - "
    • GET /bitstreams/{bitstream id}/retrieve - Return the contents of the specified bitstream.
    • " + - "
    • POST /bitstreams/{bitstream id}/policy - Add a policy to the specified bitstream.
    • " + - "
    • PUT /bitstreams/{bitstream id}/data - Update the contents of the specified bitstream.
    • " + - "
    • PUT /bitstreams/{bitstream id} - Update metadata of the specified bitstream.
    • " + - "
    • DELETE /bitstreams/{bitstream id} - Delete the specified bitstream from DSpace.
    • " + - "
    • DELETE /bitstreams/{bitstream id}/policy/{policy_id} - Delete the specified bitstream policy.
    • " + - "
    " + - "

    Hierarchy

    " + - "
      " + - "
    • GET /hierarchy - Return hierarchy of communities and collections in tree form. Each object is " + - "minimally populated (name, handle, id) for efficient retrieval.
    • " + - "
    " + - "

    Metadata and Schema Registry

    " + - "
      " + - "
    • GET /registries/schema - Return the list of metadata schemas in the registry
    • " + - "
    • GET /registries/schema/{schema_prefix} - Returns the specified metadata schema
    • " + - "
    • GET /registries/schema/{schema_prefix}/metadata-fields/{element} - Returns the metadata field within" + - " a schema with an unqualified element name
    • " + - "
    • GET /registries/schema/{schema_prefix}/metadata-fields/{element}/{qualifier} - Returns the metadata " + - "field within a schema with a qualified element name
    • " + - "
    • POST /registries/schema/ - Add a schema to the schema registry
    • " + - "
    • POST /registries/schema/{schema_prefix}/metadata-fields - Add a metadata field to the specified " + - "schema
    • " + - "
    • GET /registries/metadata-fields/{field_id} - Return the specified metadata field
    • " + - "
    • PUT /registries/metadata-fields/{field_id} - Update the specified metadata field
    • " + - "
    • DELETE /registries/metadata-fields/{field_id} - Delete the specified metadata field from the " + - "metadata field registry
    • " + - "
    • DELETE /registries/schema/{schema_id} - Delete the specified schema from the schema registry
    • " + - "
    " + - "

    Query/Reporting Tools

    " + - "
      " + - "
    • GET /reports - Return a list of report tools built on the rest api
    • " + - "
    • GET /reports/{nickname} - Return a redirect to a specific report
    • " + - "
    • GET /filters - Return a list of use case filters available for quality control reporting
    • " + - "
    • GET /filtered-collections - Return collections and item counts based on pre-defined filters
    • " + - "
    • GET /filtered-collections/{collection_id} - Return items and item counts for a collection based on " + - "pre-defined filters
    • " + - "
    • GET /filtered-items - Retrieve a set of items based on a metadata query and a set of filters
    • " + - "
    " + - " "; - } - - /** - * Method only for testing whether the REST API is running. - * - * @return String "REST api is running." - */ - @GET - @Path("/test") - public String test() { - return "REST api is running."; - } - - /** - * Method to login a user into REST API. - * - * @return Returns response code OK and a token. Otherwise returns response - * code FORBIDDEN(403). - */ - @POST - @Path("/login") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response login() { - //If you can get here, you are authenticated, the actual login is handled by spring security - return Response.ok().build(); - } - - @GET - @Path("/shibboleth-login") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response shibbolethLogin() { - //If you can get here, you are authenticated, the actual login is handled by spring security - return Response.ok().build(); - } - - @GET - @Path("/login-shibboleth") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response shibbolethLoginEndPoint() { - org.dspace.core.Context context = null; - try { - context = Resource.createContext(); - AuthenticationService authenticationService = AuthenticateServiceFactory.getInstance() - .getAuthenticationService(); - Iterator authenticationMethodIterator = authenticationService - .authenticationMethodIterator(); - while (authenticationMethodIterator.hasNext()) { - AuthenticationMethod authenticationMethod = authenticationMethodIterator.next(); - if (authenticationMethod instanceof ShibAuthentication) { - //TODO: Perhaps look for a better way of handling this ? 
- org.dspace.services.model.Request currentRequest = new DSpace().getRequestService() - .getCurrentRequest(); - String loginPageURL = authenticationMethod - .loginPageURL(context, currentRequest.getHttpServletRequest(), - currentRequest.getHttpServletResponse()); - if (StringUtils.isNotBlank(loginPageURL)) { - currentRequest.getHttpServletResponse().sendRedirect(loginPageURL); - } - } - } - context.abort(); - } catch (ContextException | SQLException | IOException e) { - Resource.processException("Shibboleth endpoint error: " + e.getMessage(), context); - } finally { - if (context != null && context.isValid()) { - context.abort(); - } - - } - return Response.ok().build(); - } - - /** - * Method to logout a user from DSpace REST API. Removes the token and user from - * TokenHolder. - * - * @param headers Request header which contains the header named - * "rest-dspace-token" containing the token as value. - * @return Return response OK, otherwise BAD_REQUEST, if there was a problem with - * logout or the token is incorrect. - */ - @POST - @Path("/logout") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Response logout(@Context HttpHeaders headers) { - //If you can get here, you are logged out, this actual logout is handled by spring security - return Response.ok().build(); - } - - /** - * Method to check current status of the service and logged in user. - * - * okay: true | false - * authenticated: true | false - * epersonEMAIL: user@example.com - * epersonNAME: John Doe - * - * @param headers Request header which contains the header named - * "rest-dspace-token" containing the token as value. - * @return status the Status object with information about REST API - * @throws UnsupportedEncodingException The Character Encoding is not supported. 
- */ - @GET - @Path("/status") - @Consumes( {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public Status status(@Context HttpHeaders headers) - throws UnsupportedEncodingException { - org.dspace.core.Context context = null; - - try { - context = Resource.createContext(); - EPerson ePerson = context.getCurrentUser(); - - if (ePerson != null) { - //DB EPerson needed since token won't have full info, need context - EPerson dbEPerson = epersonService.findByEmail(context, ePerson.getEmail()); - - Status status = new Status(dbEPerson.getEmail(), dbEPerson.getFullName()); - return status; - } - } catch (ContextException e) { - Resource.processException("Status context error: " + e.getMessage(), context); - } catch (SQLException e) { - Resource.processException("Status eperson db lookup error: " + e.getMessage(), context); - } finally { - context.abort(); - } - - //fallback status, unauth - return new Status(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/RestReports.java b/dspace-rest/src/main/java/org/dspace/rest/RestReports.java deleted file mode 100644 index 4af556b6f87b..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/RestReports.java +++ /dev/null @@ -1,86 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest; - -import java.net.URI; -import java.util.ArrayList; -import java.util.List; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.WebApplicationException; -import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.UriInfo; - -import org.apache.logging.log4j.Logger; -import org.dspace.rest.common.Report; -import org.dspace.services.ConfigurationService; -import 
org.dspace.services.factory.DSpaceServicesFactory; - - -/** - * Root of RESTful api. It provides login and logout. Also have method for - * printing every method which is provides by RESTful api. - * - * @author Terry Brady, Georgetown University - */ -@Path("/reports") -public class RestReports { - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(RestReports.class); - - protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - public static final String REST_RPT_URL = "rest.report-url."; - - /** - * Return html page with information about REST api. It contains methods all - * methods provide by REST api. - * - * @return HTML page which has information about all methods of REST api. - */ - @GET - @Produces(MediaType.APPLICATION_XML) - public Report[] reportIndex() - throws WebApplicationException { - ArrayList reports = new ArrayList(); - List propNames = configurationService.getPropertyKeys("rest"); - for (String propName : propNames) { - if (propName.startsWith(REST_RPT_URL)) { - String nickname = propName.substring(REST_RPT_URL.length()); - String url = configurationService.getProperty(propName); - reports.add(new Report(nickname, url)); - } - } - return reports.toArray(new Report[0]); - } - - @Path("/{report_nickname}") - @GET - public Response customReport(@PathParam("report_nickname") String report_nickname, @Context UriInfo uriInfo) - throws WebApplicationException { - URI uri = null; - if (!report_nickname.isEmpty()) { - log.info(String.format("Seeking report %s", report_nickname)); - String url = configurationService.getProperty(REST_RPT_URL + report_nickname); - - log.info(String.format("URL for report %s found: [%s]", report_nickname, url)); - if (!url.isEmpty()) { - uri = uriInfo.getBaseUriBuilder().path(url).build(""); - log.info(String.format("URI for report %s", uri)); - } - } - - if (uri != null) { - return Response.temporaryRedirect(uri).build(); - } - - return 
Response.noContent().build(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/authentication/DSpaceAuthenticationProvider.java b/dspace-rest/src/main/java/org/dspace/rest/authentication/DSpaceAuthenticationProvider.java deleted file mode 100644 index 5d0bedb4420b..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/authentication/DSpaceAuthenticationProvider.java +++ /dev/null @@ -1,130 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.authentication; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import javax.servlet.http.HttpServletRequest; - -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Logger; -import org.dspace.authenticate.AuthenticationMethod; -import org.dspace.authenticate.factory.AuthenticateServiceFactory; -import org.dspace.authenticate.service.AuthenticationService; -import org.dspace.core.Context; -import org.dspace.core.LogManager; -import org.dspace.eperson.EPerson; -import org.dspace.eperson.Group; -import org.dspace.utils.DSpace; -import org.springframework.security.authentication.AuthenticationProvider; -import org.springframework.security.authentication.BadCredentialsException; -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; -import org.springframework.security.core.Authentication; -import org.springframework.security.core.AuthenticationException; -import org.springframework.security.core.authority.SimpleGrantedAuthority; - -/** - * The core authentication & authorization provider, this provider is called when logging in & will process - * - * @author Roeland Dillen (roeland at atmire dot com) - * @author kevinvandevelde at atmire.com - * - * FIXME This provider handles both the authorization as well as the 
authentication, - * due to the way that the DSpace authentication is implemented there is currently no other way to do this. - */ -public class DSpaceAuthenticationProvider implements AuthenticationProvider { - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(DSpaceAuthenticationProvider.class); - - protected AuthenticationService authenticationService = AuthenticateServiceFactory.getInstance() - .getAuthenticationService(); - - @Override - public Authentication authenticate(Authentication authentication) throws AuthenticationException { - Context context = null; - - try { - context = new Context(); - String name = authentication.getName(); - String password = authentication.getCredentials().toString(); - HttpServletRequest httpServletRequest = new DSpace().getRequestService().getCurrentRequest() - .getHttpServletRequest(); - List grantedAuthorities = new ArrayList<>(); - - - int implicitStatus = authenticationService - .authenticateImplicit(context, null, null, null, httpServletRequest); - - if (implicitStatus == AuthenticationMethod.SUCCESS) { - log.info(LogManager.getHeader(context, "login", "type=implicit")); - addSpecialGroupsToGrantedAuthorityList(context, httpServletRequest, grantedAuthorities); - return createAuthenticationToken(password, context, grantedAuthorities); - - } else { - int authenticateResult = authenticationService - .authenticate(context, name, password, null, httpServletRequest); - if (AuthenticationMethod.SUCCESS == authenticateResult) { - addSpecialGroupsToGrantedAuthorityList(context, httpServletRequest, grantedAuthorities); - - log.info(LogManager - .getHeader(context, "login", "type=explicit")); - - return createAuthenticationToken(password, context, grantedAuthorities); - - } else { - log.info(LogManager.getHeader(context, "failed_login", "email=" - + name + ", result=" - + authenticateResult)); - throw new BadCredentialsException("Login failed"); - } - } - } catch (BadCredentialsException e) { - throw e; - } 
catch (Exception e) { - log.error("Error while authenticating in the rest api", e); - } finally { - if (context != null && context.isValid()) { - try { - context.complete(); - } catch (SQLException e) { - log.error(e.getMessage() + " occurred while trying to close", e); - } - } - } - - return null; - } - - protected void addSpecialGroupsToGrantedAuthorityList(Context context, HttpServletRequest httpServletRequest, - List grantedAuthorities) - throws SQLException { - List groups = authenticationService.getSpecialGroups(context, httpServletRequest); - for (Group group : groups) { - grantedAuthorities.add(new SimpleGrantedAuthority(group.getName())); - } - } - - private Authentication createAuthenticationToken(final String password, final Context context, - final List grantedAuthorities) { - EPerson ePerson = context.getCurrentUser(); - if (ePerson != null && StringUtils.isNotBlank(ePerson.getEmail())) { - return new UsernamePasswordAuthenticationToken(ePerson.getEmail(), password, grantedAuthorities); - - } else { - log.info( - LogManager.getHeader(context, "failed_login", "No eperson with an non-blank e-mail address found")); - throw new BadCredentialsException("Login failed"); - } - } - - @Override - public boolean supports(Class authentication) { - return (UsernamePasswordAuthenticationToken.class.isAssignableFrom(authentication)); - } -} \ No newline at end of file diff --git a/dspace-rest/src/main/java/org/dspace/rest/authentication/NoRedirectAuthenticationLoginSuccessHandler.java b/dspace-rest/src/main/java/org/dspace/rest/authentication/NoRedirectAuthenticationLoginSuccessHandler.java deleted file mode 100644 index af146f27b71c..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/authentication/NoRedirectAuthenticationLoginSuccessHandler.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - 
* - * http://www.dspace.org/license/ - */ -package org.dspace.rest.authentication; - -import java.io.IOException; -import javax.annotation.PostConstruct; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.springframework.security.web.RedirectStrategy; -import org.springframework.security.web.authentication.SimpleUrlAuthenticationSuccessHandler; - -/** - * @author kevinvandevelde at atmire.com - * - * Spring redirects to the home page after a successfull login. This success handles ensures that this is NOT the case. - */ -public class NoRedirectAuthenticationLoginSuccessHandler extends SimpleUrlAuthenticationSuccessHandler { - - @PostConstruct - public void afterPropertiesSet() { - setRedirectStrategy(new NoRedirectStrategy()); - } - - protected class NoRedirectStrategy implements RedirectStrategy { - - @Override - public void sendRedirect(HttpServletRequest request, - HttpServletResponse response, String url) throws IOException { - // no redirect - - } - - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/authentication/NoRedirectAuthenticationLogoutSuccessHandler.java b/dspace-rest/src/main/java/org/dspace/rest/authentication/NoRedirectAuthenticationLogoutSuccessHandler.java deleted file mode 100644 index db28f2e388c0..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/authentication/NoRedirectAuthenticationLogoutSuccessHandler.java +++ /dev/null @@ -1,39 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.authentication; - -import java.io.IOException; -import javax.annotation.PostConstruct; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.springframework.security.web.RedirectStrategy; -import 
org.springframework.security.web.authentication.logout.SimpleUrlLogoutSuccessHandler; - -/** - * @author kevinvandevelde at atmire.com - * - * Spring redirects to the home page after a successfull logout. This success handles ensures that this is NOT the case. - */ -public class NoRedirectAuthenticationLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler { - @PostConstruct - public void afterPropertiesSet() { - setRedirectStrategy(new NoRedirectStrategy()); - } - - protected class NoRedirectStrategy implements RedirectStrategy { - - @Override - public void sendRedirect(HttpServletRequest request, - HttpServletResponse response, String url) throws IOException { - // no redirect - - } - - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/Bitstream.java b/dspace-rest/src/main/java/org/dspace/rest/common/Bitstream.java deleted file mode 100644 index 7eb198990e85..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/Bitstream.java +++ /dev/null @@ -1,199 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import javax.servlet.ServletContext; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; -import org.dspace.content.Bundle; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.BitstreamService; -import org.dspace.content.service.BundleService; -import org.dspace.core.Constants; -import org.dspace.core.Context; -import org.dspace.utils.DSpace; - -/** - * Created with IntelliJ IDEA. - * User: peterdietz - * Date: 9/21/13 - * Time: 12:54 AM - * To change this template use File | Settings | File Templates. 
- */ -@XmlRootElement(name = "bitstream") -public class Bitstream extends DSpaceObject { - protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); - protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService(); - - Logger log = org.apache.logging.log4j.LogManager.getLogger(Bitstream.class); - - private String bundleName; - private String description; - private String format; - private String mimeType; - private Long sizeBytes; - private DSpaceObject parentObject; - private String retrieveLink; - private CheckSum checkSum; - private Integer sequenceId; - - private ResourcePolicy[] policies = null; - - public Bitstream() { - - } - - public Bitstream(org.dspace.content.Bitstream bitstream, ServletContext servletContext, String expand, - Context context) - throws SQLException { - super(bitstream, servletContext); - setup(bitstream, servletContext, expand, context); - } - - public void setup(org.dspace.content.Bitstream bitstream, ServletContext servletContext, String expand, - Context context) - throws SQLException { - List expandFields = new ArrayList(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - - //A logo bitstream might not have a bundle... 
- if (bitstream.getBundles() != null && !bitstream.getBundles().isEmpty()) { - if (bitstreamService.getParentObject(context, bitstream).getType() == Constants.ITEM) { - bundleName = bitstream.getBundles().get(0).getName(); - } - } - - description = bitstream.getDescription(); - format = bitstreamService.getFormatDescription(context, bitstream); - sizeBytes = bitstream.getSizeBytes(); - String path = new DSpace().getRequestService().getCurrentRequest().getHttpServletRequest().getContextPath(); - retrieveLink = path + "/bitstreams/" + bitstream.getID() + "/retrieve"; - mimeType = bitstreamService.getFormat(context, bitstream).getMIMEType(); - sequenceId = bitstream.getSequenceID(); - CheckSum checkSum = new CheckSum(); - checkSum.setCheckSumAlgorith(bitstream.getChecksumAlgorithm()); - checkSum.setValue(bitstream.getChecksum()); - this.setCheckSum(checkSum); - - if (expandFields.contains("parent") || expandFields.contains("all")) { - parentObject = new DSpaceObject(bitstreamService.getParentObject(context, bitstream), servletContext); - } else { - this.addExpand("parent"); - } - - if (expandFields.contains("policies") || expandFields.contains("all")) { - // Find policies without context. 
- List tempPolicies = new ArrayList(); - List bundles = bitstream.getBundles(); - for (Bundle bundle : bundles) { - List bitstreamsPolicies = bundleService - .getBitstreamPolicies(context, bundle); - for (org.dspace.authorize.ResourcePolicy policy : bitstreamsPolicies) { - if (policy.getdSpaceObject().equals(bitstream)) { - tempPolicies.add(new ResourcePolicy(policy)); - } - } - } - - policies = tempPolicies.toArray(new ResourcePolicy[0]); - } else { - this.addExpand("policies"); - } - - if (!expandFields.contains("all")) { - this.addExpand("all"); - } - } - - public Integer getSequenceId() { - return sequenceId; - } - - public void setSequenceId(Integer sequenceId) { - this.sequenceId = sequenceId; - } - - public String getBundleName() { - return bundleName; - } - - public void setBundleName(String bundleName) { - this.bundleName = bundleName; - } - - public void setDescription(String description) { - this.description = description; - } - - public void setFormat(String format) { - this.format = format; - } - - public void setMimeType(String mimeType) { - this.mimeType = mimeType; - } - - public void setSizeBytes(Long sizeBytes) { - this.sizeBytes = sizeBytes; - } - - public void setParentObject(DSpaceObject parentObject) { - this.parentObject = parentObject; - } - - public void setRetrieveLink(String retrieveLink) { - this.retrieveLink = retrieveLink; - } - - public String getDescription() { - return description; - } - - public String getFormat() { - return format; - } - - public String getMimeType() { - return mimeType; - } - - public Long getSizeBytes() { - return sizeBytes; - } - - public String getRetrieveLink() { - return retrieveLink; - } - - public DSpaceObject getParentObject() { - return parentObject; - } - - public CheckSum getCheckSum() { - return checkSum; - } - - public void setCheckSum(CheckSum checkSum) { - this.checkSum = checkSum; - } - - public ResourcePolicy[] getPolicies() { - return policies; - } - - public void setPolicies(ResourcePolicy[] 
policies) { - this.policies = policies; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/CheckSum.java b/dspace-rest/src/main/java/org/dspace/rest/common/CheckSum.java deleted file mode 100644 index 2db36ae9a0f8..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/CheckSum.java +++ /dev/null @@ -1,40 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.rest.common; - -import javax.xml.bind.annotation.XmlAttribute; -import javax.xml.bind.annotation.XmlType; -import javax.xml.bind.annotation.XmlValue; - -@XmlType -public class CheckSum { - String checkSumAlgorithm; - String value; - - public CheckSum() { - } - - @XmlAttribute(name = "checkSumAlgorithm") - public String getCheckSumAlgorith() { - return checkSumAlgorithm; - } - - public void setCheckSumAlgorith(String checkSumAlgorith) { - this.checkSumAlgorithm = checkSumAlgorith; - } - - @XmlValue - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/Collection.java b/dspace-rest/src/main/java/org/dspace/rest/common/Collection.java deleted file mode 100644 index be6e698b4d41..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/Collection.java +++ /dev/null @@ -1,225 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT; -import static 
org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION; -import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import javax.servlet.ServletContext; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; - -/** - * Created with IntelliJ IDEA. - * User: peterdietz - * Date: 5/22/13 - * Time: 9:41 AM - */ -@XmlRootElement(name = "collection") -public class Collection extends DSpaceObject { - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - - Logger log = org.apache.logging.log4j.LogManager.getLogger(Collection.class); - - //Relationships - private Bitstream logo; - private Community parentCommunity; - private List parentCommunityList = new ArrayList<>(); - - private List items = new ArrayList<>(); - - //Collection-Metadata - private String license; - private String copyrightText; - private String introductoryText; - private String shortDescription; - private String sidebarText; - - //Calculated - private Integer numberItems; - - public Collection() { - } - - public Collection(org.dspace.content.Collection collection, ServletContext servletContext, String expand, - Context context, Integer limit, Integer offset) - throws SQLException, WebApplicationException { - super(collection, servletContext); - setup(collection, 
servletContext, expand, context, limit, offset); - } - - private void setup(org.dspace.content.Collection collection, ServletContext servletContext, String expand, - Context context, Integer limit, Integer offset) - throws SQLException { - List expandFields = new ArrayList<>(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - - this.setCopyrightText(collectionService.getMetadataFirstValue(collection, - MD_COPYRIGHT_TEXT, org.dspace.content.Item.ANY)); - this.setIntroductoryText(collectionService.getMetadataFirstValue(collection, - MD_INTRODUCTORY_TEXT, org.dspace.content.Item.ANY)); - this.setShortDescription(collectionService.getMetadataFirstValue(collection, - MD_SHORT_DESCRIPTION, org.dspace.content.Item.ANY)); - this.setSidebarText(collectionService.getMetadataFirstValue(collection, - MD_SIDEBAR_TEXT, org.dspace.content.Item.ANY)); - - if (expandFields.contains("parentCommunityList") || expandFields.contains("all")) { - List parentCommunities = communityService.getAllParents(context, collection); - for (org.dspace.content.Community parentCommunity : parentCommunities) { - this.addParentCommunityList(new Community(parentCommunity, servletContext, null, context)); - } - } else { - this.addExpand("parentCommunityList"); - } - - if (expandFields.contains("parentCommunity") | expandFields.contains("all")) { - org.dspace.content.Community parentCommunity = - (org.dspace.content.Community) collectionService - .getParentObject(context, collection); - this.setParentCommunity(new Community( - parentCommunity, servletContext, null, context)); - } else { - this.addExpand("parentCommunity"); - } - - //TODO: Item paging. 
limit, offset/page - if (expandFields.contains("items") || expandFields.contains("all")) { - Iterator childItems = - itemService.findByCollection(context, collection, limit, offset); - - items = new ArrayList<>(); - while (childItems.hasNext()) { - org.dspace.content.Item item = childItems.next(); - - if (itemService.isItemListedForUser(context, item)) { - items.add(new Item(item, servletContext, null, context)); - } - } - } else { - this.addExpand("items"); - } - - if (expandFields.contains("license") || expandFields.contains("all")) { - setLicense(collectionService.getLicense(collection)); - } else { - this.addExpand("license"); - } - - if (expandFields.contains("logo") || expandFields.contains("all")) { - if (collection.getLogo() != null) { - this.logo = new Bitstream(collection.getLogo(), servletContext, null, context); - } - } else { - this.addExpand("logo"); - } - - if (!expandFields.contains("all")) { - this.addExpand("all"); - } - - this.setNumberItems(itemService.countItems(context, collection)); - } - - public Bitstream getLogo() { - return logo; - } - - public Integer getNumberItems() { - return numberItems; - } - - public void setNumberItems(Integer numberItems) { - this.numberItems = numberItems; - } - - public Community getParentCommunity() { - return parentCommunity; - } - - public void setParentCommunity(Community parentCommunity) { - this.parentCommunity = parentCommunity; - } - - public List getItems() { - return items; - } - - public void setItems(List items) { - this.items = items; - } - - public void setParentCommunityList(List parentCommunityList) { - this.parentCommunityList = parentCommunityList; - } - - public List getParentCommunityList() { - return parentCommunityList; - } - - public void addParentCommunityList(Community parentCommunity) { - this.parentCommunityList.add(parentCommunity); - } - - public String getLicense() { - return license; - } - - public void setLicense(String license) { - this.license = license; - } - - public String 
getCopyrightText() { - return copyrightText; - } - - public void setCopyrightText(String copyrightText) { - this.copyrightText = copyrightText; - } - - public String getIntroductoryText() { - return introductoryText; - } - - public void setIntroductoryText(String introductoryText) { - this.introductoryText = introductoryText; - } - - public String getShortDescription() { - return shortDescription; - } - - public void setShortDescription(String shortDescription) { - this.shortDescription = shortDescription; - } - - public String getSidebarText() { - return sidebarText; - } - - public void setSidebarText(String sidebarText) { - this.sidebarText = sidebarText; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/Community.java b/dspace-rest/src/main/java/org/dspace/rest/common/Community.java deleted file mode 100644 index e6e4716eab24..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/Community.java +++ /dev/null @@ -1,217 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import static org.dspace.content.service.DSpaceObjectService.MD_COPYRIGHT_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_INTRODUCTORY_TEXT; -import static org.dspace.content.service.DSpaceObjectService.MD_SHORT_DESCRIPTION; -import static org.dspace.content.service.DSpaceObjectService.MD_SIDEBAR_TEXT; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import javax.servlet.ServletContext; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import 
org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; - -/** - * Created with IntelliJ IDEA. - * User: peterdietz - * Date: 5/22/13 - * Time: 9:41 AM - * To change this template use File | Settings | File Templates. - */ -@XmlRootElement(name = "community") -public class Community extends DSpaceObject { - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(Community.class); - - //Exandable relationships - private Bitstream logo; - - private Community parentCommunity; - - private String copyrightText; - private String introductoryText; - private String shortDescription; - private String sidebarText; - private Integer countItems; - - private List subcommunities = new ArrayList<>(); - - private List collections = new ArrayList<>(); - - public Community() { - } - - public Community(org.dspace.content.Community community, ServletContext servletContext, String expand, - Context context) - throws SQLException, WebApplicationException { - super(community, servletContext); - setup(community, servletContext, expand, context); - } - - private void setup(org.dspace.content.Community community, ServletContext servletContext, String expand, - Context context) - throws SQLException { - List expandFields = new ArrayList<>(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - - this.setCopyrightText(communityService.getMetadataFirstValue(community, - MD_COPYRIGHT_TEXT, org.dspace.content.Item.ANY)); - 
this.setIntroductoryText(communityService.getMetadataFirstValue(community, - MD_INTRODUCTORY_TEXT, org.dspace.content.Item.ANY)); - this.setShortDescription(communityService.getMetadataFirstValue(community, - MD_SHORT_DESCRIPTION, org.dspace.content.Item.ANY)); - this.setSidebarText(communityService.getMetadataFirstValue(community, - MD_SIDEBAR_TEXT, org.dspace.content.Item.ANY)); - this.setCountItems(itemService.countItems(context, community)); - - if (expandFields.contains("parentCommunity") || expandFields.contains("all")) { - org.dspace.content.Community parentCommunity = (org.dspace.content.Community) communityService - .getParentObject(context, community); - if (parentCommunity != null) { - setParentCommunity(new Community(parentCommunity, servletContext, null, context)); - } - } else { - this.addExpand("parentCommunity"); - } - - if (expandFields.contains("collections") || expandFields.contains("all")) { - List collections = community.getCollections(); - List restCollections = new ArrayList<>(); - - for (org.dspace.content.Collection collection : collections) { - if (authorizeService.authorizeActionBoolean(context, collection, org.dspace.core.Constants.READ)) { - restCollections.add(new Collection(collection, servletContext, null, context, null, null)); - } else { - log.info("Omitted restricted collection: " + collection.getID() + " _ " + collection.getName()); - } - } - setCollections(restCollections); - } else { - this.addExpand("collections"); - } - - if (expandFields.contains("subCommunities") || expandFields.contains("all")) { - List communities = community.getSubcommunities(); - subcommunities = new ArrayList<>(); - for (org.dspace.content.Community subCommunity : communities) { - if (authorizeService.authorizeActionBoolean(context, subCommunity, org.dspace.core.Constants.READ)) { - subcommunities.add(new Community(subCommunity, servletContext, null, context)); - } else { - log.info( - "Omitted restricted subCommunity: " + subCommunity.getID() + " _ " 
+ subCommunity.getName()); - } - } - } else { - this.addExpand("subCommunities"); - } - - if (expandFields.contains("logo") || expandFields.contains("all")) { - if (community.getLogo() != null) { - logo = new Bitstream(community.getLogo(), servletContext, null, context); - } - } else { - this.addExpand("logo"); - } - - if (!expandFields.contains("all")) { - this.addExpand("all"); - } - } - - public List getCollections() { - return collections; - } - - public void setCollections(List collections) { - this.collections = collections; - } - - public Integer getCountItems() { - return countItems; - } - - public void setCountItems(Integer countItems) { - this.countItems = countItems; - } - - public String getSidebarText() { - return sidebarText; - } - - public void setSidebarText(String sidebarText) { - this.sidebarText = sidebarText; - } - - public String getShortDescription() { - return shortDescription; - } - - public void setShortDescription(String shortDescription) { - this.shortDescription = shortDescription; - } - - public String getIntroductoryText() { - return introductoryText; - } - - public void setIntroductoryText(String introductoryText) { - this.introductoryText = introductoryText; - } - - public String getCopyrightText() { - return copyrightText; - } - - public void setCopyrightText(String copyrightText) { - this.copyrightText = copyrightText; - } - - public Community getParentCommunity() { - return parentCommunity; - } - - public void setParentCommunity(Community parentCommunity) { - this.parentCommunity = parentCommunity; - } - - public Bitstream getLogo() { - return logo; - } - - public void setLogo(Bitstream logo) { - this.logo = logo; - } - - // Renamed because of xml annotation exception with this attribute and getSubCommunities. 
- @XmlElement(name = "subcommunities", required = true) - public List getSubcommunities() { - return subcommunities; - } - - public void setSubcommunities(List subcommunities) { - this.subcommunities = subcommunities; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/DSpaceObject.java b/dspace-rest/src/main/java/org/dspace/rest/common/DSpaceObject.java deleted file mode 100644 index 08df254336f7..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/DSpaceObject.java +++ /dev/null @@ -1,107 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.util.ArrayList; -import java.util.List; -import javax.servlet.ServletContext; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.atteo.evo.inflector.English; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.DSpaceObjectService; - -/** - * Created with IntelliJ IDEA. - * User: peterdietz - * Date: 10/7/13 - * Time: 12:11 PM - * To change this template use File | Settings | File Templates. 
- */ -@XmlRootElement(name = "dspaceobject") -public class DSpaceObject { - - private String uuid; - - private String name; - private String handle; - private String type; - - @XmlElement(name = "link", required = true) - private String link; - - @XmlElement(required = true) - private ArrayList expand = new ArrayList(); - - public DSpaceObject() { - - } - - public DSpaceObject(org.dspace.content.DSpaceObject dso, ServletContext servletContext) { - setUUID(dso.getID().toString()); - setName(dso.getName()); - setHandle(dso.getHandle()); - DSpaceObjectService dspaceObjectService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); - setType(dspaceObjectService.getTypeText(dso).toLowerCase()); - link = createLink(servletContext); - } - - public String getName() { - return this.name; - } - - public void setName(String name) { - this.name = name; - } - - public String getHandle() { - return handle; - } - - public void setHandle(String handle) { - this.handle = handle; - } - - public String getLink() { - return link; - } - - public String getType() { - return this.type; - } - - public void setType(String type) { - this.type = type; - } - - - public List getExpand() { - return expand; - } - - public void setExpand(ArrayList expand) { - this.expand = expand; - } - - public void addExpand(String expandableAttribute) { - this.expand.add(expandableAttribute); - } - - public String getUUID() { - return uuid; - } - - public void setUUID(String uuid) { - this.uuid = uuid; - } - - private String createLink(ServletContext context) { - return context.getContextPath() + "/" + English.plural(getType()) + "/" + getUUID(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/FilteredCollection.java b/dspace-rest/src/main/java/org/dspace/rest/common/FilteredCollection.java deleted file mode 100644 index c7ff0ef9b3a0..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/FilteredCollection.java +++ /dev/null @@ -1,191 +0,0 @@ -/** - * The 
contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import javax.servlet.ServletContext; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.rest.filter.ItemFilterSet; - -/** - * Retrieve items within a collection that match a specific set of Item Filters of interest - * - * @author Terry Brady, Georgetown University - */ -@XmlRootElement(name = "filtered-collection") -public class FilteredCollection extends DSpaceObject { - protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService(); - protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - Logger log = org.apache.logging.log4j.LogManager.getLogger(FilteredCollection.class); - - //Relationships - private Community parentCommunity; - private Community topCommunity; - private List parentCommunityList = new ArrayList(); - - private List items = new ArrayList(); - - private List itemFilters = new ArrayList(); - - //Calculated - private Integer numberItems; - private Integer numberItemsProcessed; - - public FilteredCollection() { - } - - /** - * Evaluate a collection against of set of Item Filters - * - * @param collection DSpace Collection to evaluate - * 
@param servletContext Context of the servlet container. - * @param filters String representing a list of filters - * @param expand String in which is what you want to add to returned instance - * of collection. Options are: "all", "parentCommunityList", - * "parentCommunity", "items", "license" and "logo". If you want - * to use multiple options, it must be separated by commas. - * @param context The relevant DSpace Context. - * @param limit Limit value for items in list in collection. Default value is 100. - * @param offset Offset of start index in list of items of collection. Default - * value is 0. - * @throws SQLException An exception that provides information on a database access error or other - * errors. - * @throws WebApplicationException Runtime exception for applications. - */ - public FilteredCollection(org.dspace.content.Collection collection, ServletContext servletContext, String filters, - String expand, Context context, Integer limit, Integer offset) - throws SQLException, WebApplicationException { - super(collection, servletContext); - setup(collection, servletContext, expand, context, limit, offset, filters); - } - - private void setup(org.dspace.content.Collection collection, ServletContext servletContext, String expand, - Context context, Integer limit, Integer offset, String filters) throws SQLException { - List expandFields = new ArrayList(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - - if (expandFields.contains("parentCommunityList") || expandFields.contains("all")) { - List parentCommunities = communityService.getAllParents(context, collection); - List parentCommunityList = new ArrayList(); - for (org.dspace.content.Community parentCommunity : parentCommunities) { - parentCommunityList.add(new Community(parentCommunity, servletContext, null, context)); - } - this.setParentCommunityList(parentCommunityList); - } else { - this.addExpand("parentCommunityList"); - } - - if 
(expandFields.contains("parentCommunity") | expandFields.contains("all")) { - org.dspace.content.Community parentCommunity = collection.getCommunities().get(0); - this.setParentCommunity(new Community(parentCommunity, servletContext, null, context)); - } else { - this.addExpand("parentCommunity"); - } - - if (expandFields.contains("topCommunity") | expandFields.contains("all")) { - List parentCommunities = communityService.getAllParents(context, collection); - if (parentCommunities.size() > 0) { - org.dspace.content.Community topCommunity = parentCommunities.get(parentCommunities.size() - 1); - this.setTopCommunity(new Community(topCommunity, servletContext, null, context)); - } - } else { - this.addExpand("topCommunity"); - } - - - boolean reportItems = expandFields.contains("items") || expandFields.contains("all"); - ItemFilterSet itemFilterSet = new ItemFilterSet(filters, reportItems); - this.setItemFilters(itemFilterSet.getItemFilters()); - - this.setNumberItemsProcessed(0); - if (itemFilters.size() > 0) { - Iterator childItems = itemService - .findAllByCollection(context, collection, limit, offset); - int numProc = itemFilterSet - .processSaveItems(context, servletContext, childItems, items, reportItems, expand); - this.setNumberItemsProcessed(numProc); - } - - if (!expandFields.contains("all")) { - this.addExpand("all"); - } - this.setNumberItems(itemService.countAllItems(context, collection)); - } - - public Integer getNumberItems() { - return numberItems; - } - - public void setNumberItems(Integer numberItems) { - this.numberItems = numberItems; - } - - public Integer getNumberItemsProcessed() { - return numberItemsProcessed; - } - - public void setNumberItemsProcessed(Integer numberItemsProcessed) { - this.numberItemsProcessed = numberItemsProcessed; - } - - public Community getParentCommunity() { - return parentCommunity; - } - - public void setParentCommunity(Community parentCommunity) { - this.parentCommunity = parentCommunity; - } - - public Community 
getTopCommunity() { - return topCommunity; - } - - public void setTopCommunity(Community topCommunity) { - this.topCommunity = topCommunity; - } - - - public List getItems() { - return items; - } - - public void setItems(List items) { - this.items = items; - } - - public void setParentCommunityList(List parentCommunityList) { - this.parentCommunityList = parentCommunityList; - } - - public List getParentCommunityList() { - return parentCommunityList; - } - - public List getItemFilters() { - return itemFilters; - } - - public void setItemFilters(List itemFilters) { - this.itemFilters = itemFilters; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyCollection.java b/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyCollection.java deleted file mode 100644 index 6c40faf62bf4..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyCollection.java +++ /dev/null @@ -1,24 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import javax.xml.bind.annotation.XmlRootElement; - -/** - * Used to handle/determine status of REST API. 
- * Mainly to know your authentication status - */ -@XmlRootElement(name = "collection") -public class HierarchyCollection extends HierarchyObject { - public HierarchyCollection() { - } - - public HierarchyCollection(String id, String name, String handle) { - super(id, name, handle); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyCommunity.java b/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyCommunity.java deleted file mode 100644 index 3618608e3e87..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyCommunity.java +++ /dev/null @@ -1,44 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.util.ArrayList; -import java.util.List; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -@XmlRootElement(name = "community") -public class HierarchyCommunity extends HierarchyObject { - private List communities = new ArrayList(); - private List collections = new ArrayList(); - - public HierarchyCommunity() { - } - - public HierarchyCommunity(String id, String name, String handle) { - super(id, name, handle); - } - - @XmlElement(name = "community") - public List getCommunities() { - return communities; - } - - public void setCommunities(List communities) { - this.communities = communities; - } - - @XmlElement(name = "collection") - public List getCollections() { - return collections; - } - - public void setCollections(List collections) { - this.collections = collections; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyObject.java b/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyObject.java deleted file mode 100644 index 0074eeea6a0e..000000000000 --- 
a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchyObject.java +++ /dev/null @@ -1,51 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import javax.xml.bind.annotation.XmlRootElement; - -@XmlRootElement(name = "object") -public class HierarchyObject { - //id may be a numeric id or a uuid depending on the version of DSpace - private String id; - private String name; - private String handle; - - public HierarchyObject() { - } - - public HierarchyObject(String id, String name, String handle) { - setId(id); - setName(name); - setHandle(handle); - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getHandle() { - return handle; - } - - public void setHandle(String handle) { - this.handle = handle; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchySite.java b/dspace-rest/src/main/java/org/dspace/rest/common/HierarchySite.java deleted file mode 100644 index 5eb2cc523cee..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/HierarchySite.java +++ /dev/null @@ -1,24 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import javax.xml.bind.annotation.XmlRootElement; - -/** - * Used to handle/determine status of REST API. 
- * Mainly to know your authentication status - */ -@XmlRootElement(name = "site") -public class HierarchySite extends HierarchyCommunity { - public HierarchySite() { - } - - public HierarchySite(String id, String name, String handle) { - super(id, name, handle); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/Item.java b/dspace-rest/src/main/java/org/dspace/rest/common/Item.java deleted file mode 100644 index 3794153b7d7e..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/Item.java +++ /dev/null @@ -1,219 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import javax.servlet.ServletContext; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; -import org.dspace.app.util.factory.UtilServiceFactory; -import org.dspace.app.util.service.MetadataExposureService; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.Bundle; -import org.dspace.content.MetadataField; -import org.dspace.content.MetadataValue; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; - -/** - * Created with IntelliJ IDEA. - * User: peterdietz - * Date: 9/19/13 - * Time: 4:50 PM - * To change this template use File | Settings | File Templates. 
- */ -@SuppressWarnings("deprecation") -@XmlRootElement(name = "item") -public class Item extends DSpaceObject { - protected ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - protected MetadataExposureService metadataExposureService = UtilServiceFactory.getInstance() - .getMetadataExposureService(); - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - - Logger log = org.apache.logging.log4j.LogManager.getLogger(Item.class); - - String isArchived; - String isWithdrawn; - String lastModified; - - Collection parentCollection; - List parentCollectionList; - List parentCommunityList; - List metadata; - List bitstreams; - - public Item() { - } - - public Item(org.dspace.content.Item item, ServletContext servletContext, String expand, Context context) - throws SQLException, WebApplicationException { - super(item, servletContext); - setup(item, servletContext, expand, context); - } - - private void setup(org.dspace.content.Item item, ServletContext servletContext, String expand, Context context) - throws SQLException { - List expandFields = new ArrayList(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - - if (expandFields.contains("metadata") || expandFields.contains("all")) { - metadata = new ArrayList(); - List metadataValues = itemService.getMetadata( - item, org.dspace.content.Item.ANY, org.dspace.content.Item.ANY, - org.dspace.content.Item.ANY, org.dspace.content.Item.ANY); - - for (MetadataValue metadataValue : metadataValues) { - MetadataField metadataField = metadataValue.getMetadataField(); - if (!metadataExposureService.isHidden(context, - metadataField.getMetadataSchema().getName(), - metadataField.getElement(), - metadataField.getQualifier())) { - metadata.add(new MetadataEntry(metadataField.toString('.'), - metadataValue.getValue(), metadataValue.getLanguage())); - } - } - } else { - this.addExpand("metadata"); - } - - 
this.setArchived(Boolean.toString(item.isArchived())); - this.setWithdrawn(Boolean.toString(item.isWithdrawn())); - this.setLastModified(item.getLastModified().toString()); - - if (expandFields.contains("parentCollection") || expandFields.contains("all")) { - if (item.getOwningCollection() != null) { - this.parentCollection = new Collection(item.getOwningCollection(), - servletContext, null, context, null, null); - } else { - this.addExpand("parentCollection"); - } - } else { - this.addExpand("parentCollection"); - } - - if (expandFields.contains("parentCollectionList") || expandFields.contains("all")) { - this.parentCollectionList = new ArrayList(); - List collections = item.getCollections(); - for (org.dspace.content.Collection collection : collections) { - this.parentCollectionList.add(new Collection(collection, - servletContext, null, context, null, null)); - } - } else { - this.addExpand("parentCollectionList"); - } - - if (expandFields.contains("parentCommunityList") || expandFields.contains("all")) { - this.parentCommunityList = new ArrayList(); - List communities = itemService.getCommunities(context, item); - - for (org.dspace.content.Community community : communities) { - this.parentCommunityList.add(new Community(community, servletContext, null, context)); - } - } else { - this.addExpand("parentCommunityList"); - } - - //TODO: paging - offset, limit - if (expandFields.contains("bitstreams") || expandFields.contains("all")) { - bitstreams = new ArrayList(); - - List bundles = item.getBundles(); - for (Bundle bundle : bundles) { - - List itemBitstreams = bundle.getBitstreams(); - for (org.dspace.content.Bitstream itemBitstream : itemBitstreams) { - if (authorizeService - .authorizeActionBoolean(context, itemBitstream, org.dspace.core.Constants.READ)) { - bitstreams.add(new Bitstream(itemBitstream, servletContext, null, context)); - } - } - } - } else { - this.addExpand("bitstreams"); - } - - if (!expandFields.contains("all")) { - this.addExpand("all"); - } 
- } - - public String getArchived() { - return isArchived; - } - - public void setArchived(String archived) { - isArchived = archived; - } - - public String getWithdrawn() { - return isWithdrawn; - } - - public void setWithdrawn(String withdrawn) { - isWithdrawn = withdrawn; - } - - public String getLastModified() { - return lastModified; - } - - public void setLastModified(String lastModified) { - this.lastModified = lastModified; - } - - public Collection getParentCollection() { - return parentCollection; - } - - public List getParentCollectionList() { - return parentCollectionList; - } - - public List getMetadata() { - return metadata; - } - - public List getBitstreams() { - return bitstreams; - } - - public List getParentCommunityList() { - return parentCommunityList; - } - - public void setParentCollection(Collection parentCollection) { - this.parentCollection = parentCollection; - } - - public void setParentCollectionList(List parentCollectionList) { - this.parentCollectionList = parentCollectionList; - } - - public void setParentCommunityList(List parentCommunityList) { - this.parentCommunityList = parentCommunityList; - } - - @XmlElement(required = true) - public void setMetadata(List metadata) { - this.metadata = metadata; - } - - public void setBitstreams(List bitstreams) { - this.bitstreams = bitstreams; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/ItemFilter.java b/dspace-rest/src/main/java/org/dspace/rest/common/ItemFilter.java deleted file mode 100644 index bc5bd1313477..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/ItemFilter.java +++ /dev/null @@ -1,274 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; 
-import java.util.LinkedHashMap; -import java.util.List; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlAttribute; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; -import org.dspace.core.Context; -import org.dspace.core.factory.CoreServiceFactory; -import org.dspace.rest.filter.ItemFilterDefs; -import org.dspace.rest.filter.ItemFilterList; -import org.dspace.rest.filter.ItemFilterTest; - - -/** - * Use Case Item Filters that match a specific set of criteria. - * - * @author Terry Brady, Georgetown University - */ -@XmlRootElement(name = "item-filter") -public class ItemFilter { - static Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilter.class); - - private ItemFilterTest itemFilterTest = null; - private String filterName = ""; - private String title; - private String description; - private String category; - private String queryAnnotation; - private List items = new ArrayList(); - private List itemFilterQueries = new ArrayList(); - private List metadata = new ArrayList(); - private Integer itemCount; - private Integer unfilteredItemCount; - private boolean saveItems = false; - - public ItemFilter() { - } - - public static final String ALL_FILTERS = "all_filters"; - public static final String ALL = "all"; - - public static List getItemFilters(String filters, boolean saveItems) { - LinkedHashMap availableTests = new LinkedHashMap(); - for (ItemFilterList plugobj : - (ItemFilterList[]) CoreServiceFactory.getInstance() - .getPluginService().getPluginSequence(ItemFilterList.class)) { - for (ItemFilterTest defFilter : plugobj.getFilters()) { - availableTests.put(defFilter.getName(), defFilter); - } - } - List itemFilters = new ArrayList(); - ItemFilter allFilters = new ItemFilter(ItemFilter.ALL_FILTERS, "Matches all specified filters", - "This filter includes all items that matched ALL specified filters", - 
ItemFilterDefs.CAT_ITEM, saveItems); - - if (filters.equals(ALL)) { - for (ItemFilterTest itemFilterDef : availableTests.values()) { - itemFilters.add(new ItemFilter(itemFilterDef, saveItems)); - } - itemFilters.add(allFilters); - } else { - for (String filter : Arrays.asList(filters.split(","))) { - if (filter.equals(ItemFilter.ALL_FILTERS)) { - continue; - } - - ItemFilterTest itemFilterDef; - itemFilterDef = availableTests.get(filter); - if (itemFilterDef == null) { - continue; - } - itemFilters.add(new ItemFilter(itemFilterDef, saveItems)); - } - itemFilters.add(allFilters); - } - return itemFilters; - } - - public static ItemFilter getAllFiltersFilter(List itemFilters) { - for (ItemFilter itemFilter : itemFilters) { - if (itemFilter.getFilterName().equals(ALL_FILTERS)) { - itemFilter.initCount(); - return itemFilter; - } - } - return null; - } - - public ItemFilter(ItemFilterTest itemFilterTest, boolean saveItems) - throws WebApplicationException { - this.itemFilterTest = itemFilterTest; - this.saveItems = saveItems; - setup(itemFilterTest.getName(), itemFilterTest.getTitle(), - itemFilterTest.getDescription(), itemFilterTest.getCategory()); - } - - public ItemFilter(String name, String title, String description, String category, boolean saveItems) - throws WebApplicationException { - this.saveItems = saveItems; - setup(name, title, description, category); - } - - private void setup(String name, String title, String description, String category) { - this.setFilterName(name); - this.setTitle(title); - this.setDescription(description); - this.setCategory(category); - } - - private void initCount() { - if (itemCount == null) { - itemCount = 0; - } - if (unfilteredItemCount == null) { - unfilteredItemCount = 0; - } - } - - public boolean hasItemTest() { - return itemFilterTest != null; - } - - public void addItem(org.dspace.rest.common.Item restItem) { - initCount(); - if (saveItems) { - items.add(restItem); - } - itemCount++; - } - - public boolean 
testItem(Context context, org.dspace.content.Item item, org.dspace.rest.common.Item restItem) { - initCount(); - if (itemFilterTest == null) { - return false; - } - if (itemFilterTest.testItem(context, item)) { - addItem(restItem); - return true; - } - return false; - } - - @XmlAttribute(name = "filter-name") - public String getFilterName() { - return filterName; - } - - public void setFilterName(String name) { - this.filterName = name; - } - - @XmlAttribute(name = "title") - public String getTitle() { - return title; - } - - public void setTitle(String title) { - this.title = title; - } - - @XmlAttribute(name = "category") - public String getCategory() { - return category; - } - - public void setCategory(String category) { - this.category = category; - } - - @XmlAttribute(name = "description") - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - @XmlAttribute(name = "query-annotation") - public String getQueryAnnotation() { - return queryAnnotation; - } - - public void annotateQuery(List query_field, List query_op, List query_val) - throws SQLException { - int index = Math.min(query_field.size(), Math.min(query_op.size(), query_val.size())); - StringBuilder sb = new StringBuilder(); - - for (int i = 0; i < index; i++) { - if (!sb.toString().isEmpty()) { - sb.append(" and "); - } - sb.append("("); - sb.append(query_field.get(i)); - sb.append(" "); - sb.append(query_op.get(i)); - sb.append(" "); - sb.append(query_val.get(i)); - sb.append(")"); - } - setQueryAnnotation(sb.toString()); - } - - public void setQueryAnnotation(String queryAnnotation) { - this.queryAnnotation = queryAnnotation; - } - - @XmlAttribute(name = "item-count") - public Integer getItemCount() { - return itemCount; - } - - public void setItemCount(Integer itemCount) { - this.itemCount = itemCount; - } - - @XmlAttribute(name = "unfiltered-item-count") - public Integer getUnfilteredItemCount() { - 
return unfilteredItemCount; - } - - public void setUnfilteredItemCount(Integer unfilteredItemCount) { - this.unfilteredItemCount = unfilteredItemCount; - } - - public List getItems() { - return items; - } - - public void setItems(List items) { - this.items = items; - } - - public List getItemFilterQueries() { - return itemFilterQueries; - } - - public void setItemFilterQueries(List itemFilterQueries) { - this.itemFilterQueries = itemFilterQueries; - } - - public void initMetadataList(List show_fields) { - if (show_fields != null) { - List returnFields = new ArrayList(); - for (String field : show_fields) { - returnFields.add(new MetadataEntry(field, null, null)); - } - setMetadata(returnFields); - } - } - - public List getMetadata() { - return metadata; - } - - @XmlElement(required = true) - public void setMetadata(List metadata) { - this.metadata = metadata; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/ItemFilterQuery.java b/dspace-rest/src/main/java/org/dspace/rest/common/ItemFilterQuery.java deleted file mode 100644 index 6f56e2b44cda..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/ItemFilterQuery.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlAttribute; -import javax.xml.bind.annotation.XmlRootElement; - -import org.apache.logging.log4j.Logger; - -/** - * Metadata Query for DSpace Items using the REST API - * - * @author Terry Brady, Georgetown University - */ -@XmlRootElement(name = "item-filter-query") -public class ItemFilterQuery { - Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilterQuery.class); - - private String field = ""; - private String operation = ""; - private 
String value = ""; - - public ItemFilterQuery() { - } - - /** - * Construct a metadata query for DSpace items - * - * @param field Name of the metadata field to query - * @param operation Operation to perform on a metadata field - * @param value Query value. - * @throws WebApplicationException Runtime exception for applications. - */ - public ItemFilterQuery(String field, String operation, String value) throws WebApplicationException { - setup(field, operation, value); - } - - private void setup(String field, String operation, String value) { - this.setField(field); - this.setOperation(operation); - this.setValue(value); - } - - @XmlAttribute(name = "field") - public String getField() { - return field; - } - - public void setField(String field) { - this.field = field; - } - - @XmlAttribute(name = "operation") - public String getOperation() { - return operation; - } - - public void setOperation(String operation) { - this.operation = operation; - } - - @XmlAttribute(name = "value") - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/MetadataEntry.java b/dspace-rest/src/main/java/org/dspace/rest/common/MetadataEntry.java deleted file mode 100644 index 27f31cec9c76..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/MetadataEntry.java +++ /dev/null @@ -1,77 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.util.regex.Pattern; -import javax.xml.bind.annotation.XmlRootElement; - -/** - * @author peterdietz, Rostislav Novak (Computing and Information Centre, CTU in - * Prague) - */ -@XmlRootElement(name = "metadataentry") -public class MetadataEntry { - String key; - - String value; - - String 
language; - - public MetadataEntry() { - } - - public MetadataEntry(String key, String value, String language) { - this.key = key; - this.value = value; - this.language = language; - } - - public String getValue() { - return value; - } - - public void setValue(String value) { - this.value = value; - } - - public String getKey() { - return key; - } - - public void setKey(String key) { - this.key = key; - } - - public String getLanguage() { - return language; - } - - public void setLanguage(String language) { - this.language = language; - } - - public String getSchema() { - String[] fieldPieces = key.split(Pattern.quote(".")); - return fieldPieces[0]; - } - - public String getElement() { - String[] fieldPieces = key.split(Pattern.quote(".")); - return fieldPieces[1]; - } - - public String getQualifier() { - String[] fieldPieces = key.split(Pattern.quote(".")); - if (fieldPieces.length == 3) { - return fieldPieces[2]; - } else { - return null; - } - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/MetadataField.java b/dspace-rest/src/main/java/org/dspace/rest/common/MetadataField.java deleted file mode 100644 index 3688b5b8ca58..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/MetadataField.java +++ /dev/null @@ -1,132 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.dspace.core.Context; - -/** - * Metadata field representation - * - * @author Terry Brady, Georgetown University. 
- */ -@XmlRootElement(name = "field") -public class MetadataField { - private int fieldId; - private String name; - private String element; - private String qualifier; - private String description; - - private MetadataSchema parentSchema; - - @XmlElement(required = true) - private ArrayList expand = new ArrayList(); - - public MetadataField() { - } - - public MetadataField(org.dspace.content.MetadataSchema schema, org.dspace.content.MetadataField field, - String expand, Context context) throws SQLException, WebApplicationException { - setup(schema, field, expand, context); - } - - private void setup(org.dspace.content.MetadataSchema schema, org.dspace.content.MetadataField field, String expand, - Context context) throws SQLException { - List expandFields = new ArrayList(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - StringBuilder sb = new StringBuilder(); - sb.append(schema.getName()); - sb.append("."); - sb.append(field.getElement()); - if (field.getQualifier() != null) { - sb.append("."); - sb.append(field.getQualifier()); - } - - this.setName(sb.toString()); - this.setFieldId(field.getID()); - this.setElement(field.getElement()); - this.setQualifier(field.getQualifier()); - this.setDescription(field.getScopeNote()); - - if (expandFields.contains("parentSchema") || expandFields.contains("all")) { - this.addExpand("parentSchema"); - parentSchema = new MetadataSchema(schema, "", context); - } - } - - public void setParentSchema(MetadataSchema schema) { - this.parentSchema = schema; - } - - public MetadataSchema getParentSchema() { - return this.parentSchema; - } - - public void setFieldId(int fieldId) { - this.fieldId = fieldId; - } - - public void setName(String name) { - this.name = name; - } - - public void setElement(String element) { - this.element = element; - } - - public void setQualifier(String qualifier) { - this.qualifier = qualifier; - } - - public void setDescription(String description) { - this.description = 
description; - } - - public int getFieldId() { - return fieldId; - } - - public String getName() { - return name; - } - - public String getQualifier() { - return qualifier; - } - - public String getElement() { - return element; - } - - public String getDescription() { - return description; - } - - public List getExpand() { - return expand; - } - - public void setExpand(ArrayList expand) { - this.expand = expand; - } - - public void addExpand(String expandableAttribute) { - this.expand.add(expandableAttribute); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/MetadataSchema.java b/dspace-rest/src/main/java/org/dspace/rest/common/MetadataSchema.java deleted file mode 100644 index 4b1e29fea233..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/MetadataSchema.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import javax.ws.rs.WebApplicationException; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; - -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.MetadataFieldService; -import org.dspace.core.Context; - -/** - * Metadata schema representation - * - * @author Terry Brady, Georgetown University. 
- */ -@XmlRootElement(name = "schema") -public class MetadataSchema { - protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); - - private int schemaID; - private String prefix; - private String namespace; - - @XmlElement(required = true) - private ArrayList expand = new ArrayList(); - - @XmlElement(name = "fields", required = true) - private List fields = new ArrayList(); - - public MetadataSchema() { - } - - public MetadataSchema(org.dspace.content.MetadataSchema schema, String expand, Context context) - throws SQLException, WebApplicationException { - setup(schema, expand, context); - } - - private void setup(org.dspace.content.MetadataSchema schema, String expand, Context context) throws SQLException { - List expandFields = new ArrayList(); - if (expand != null) { - expandFields = Arrays.asList(expand.split(",")); - } - this.setSchemaID(schema.getID()); - this.setPrefix(schema.getName()); - this.setNamespace(schema.getNamespace()); - if (expandFields.contains("fields") || expandFields.contains("all")) { - List fields = metadataFieldService.findAllInSchema(context, schema); - this.addExpand("fields"); - for (org.dspace.content.MetadataField field : fields) { - this.fields.add(new MetadataField(schema, field, "", context)); - } - } - } - - public void setPrefix(String prefix) { - this.prefix = prefix; - } - - public void setNamespace(String namespace) { - this.namespace = namespace; - } - - public String getPrefix() { - return prefix; - } - - public String getNamespace() { - return namespace; - } - - public int getSchemaID() { - return this.schemaID; - } - - public void setSchemaID(int schemaID) { - this.schemaID = schemaID; - } - - public List getMetadataFields() { - return fields; - } - - public List getExpand() { - return expand; - } - - public void setExpand(ArrayList expand) { - this.expand = expand; - } - - public void addExpand(String expandableAttribute) { - 
this.expand.add(expandableAttribute); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/Report.java b/dspace-rest/src/main/java/org/dspace/rest/common/Report.java deleted file mode 100644 index dcaf7d269eab..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/Report.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import javax.xml.bind.annotation.XmlRootElement; - -/** - * Used to handle/determine status of REST API. - * Mainly to know your authentication status - */ -@XmlRootElement(name = "report") -public class Report { - private String nickname; - private String url; - - public Report() { - setNickname("na"); - setUrl(""); - } - - - public Report(String nickname, String url) { - setNickname(nickname); - setUrl(url); - } - - public String getUrl() { - return this.url; - } - - public String getNickname() { - return this.nickname; - } - - public void setUrl(String url) { - this.url = url; - } - - public void setNickname(String nickname) { - this.nickname = nickname; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/ResourcePolicy.java b/dspace-rest/src/main/java/org/dspace/rest/common/ResourcePolicy.java deleted file mode 100644 index 366bd5fc3a83..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/ResourcePolicy.java +++ /dev/null @@ -1,195 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import java.util.Date; -import javax.xml.bind.annotation.XmlRootElement; - -import com.fasterxml.jackson.annotation.JsonIgnore; -import 
org.dspace.eperson.EPerson; -import org.dspace.eperson.Group; - -@XmlRootElement(name = "resourcepolicy") -public class ResourcePolicy { - - public enum Action { - READ, WRITE, DELETE; - } - - private Integer id; - private Action action; - private String epersonId; //UUID - private String groupId; //UUID - private String resourceId; //UUID - private String resourceType; - private String rpDescription; - private String rpName; - private String rpType; - private Date startDate; - private Date endDate; - - public ResourcePolicy() { - } - - public ResourcePolicy(org.dspace.authorize.ResourcePolicy dspacePolicy) { - this.id = dspacePolicy.getID(); - - switch (dspacePolicy.getAction()) { - case org.dspace.core.Constants.READ: - this.action = Action.READ; - break; - case org.dspace.core.Constants.WRITE: - this.action = Action.WRITE; - break; - case org.dspace.core.Constants.DELETE: - this.action = Action.DELETE; - break; - default: - break; - } - - EPerson ePerson = dspacePolicy.getEPerson(); - if (ePerson != null) { - this.epersonId = ePerson.getID().toString(); - } - - Group group = dspacePolicy.getGroup(); - if (group != null) { - this.groupId = group.getID().toString(); - } - - this.resourceId = dspacePolicy.getdSpaceObject().getID().toString(); - this.rpDescription = dspacePolicy.getRpDescription(); - this.rpName = dspacePolicy.getRpName(); - this.rpType = dspacePolicy.getRpType(); - this.startDate = dspacePolicy.getStartDate(); - this.endDate = dspacePolicy.getEndDate(); - switch (dspacePolicy.getdSpaceObject().getType()) { - case org.dspace.core.Constants.BITSTREAM: - this.resourceType = "bitstream"; - break; - case org.dspace.core.Constants.ITEM: - this.resourceType = "item"; - break; - case org.dspace.core.Constants.COLLECTION: - this.resourceType = "collection"; - break; - case org.dspace.core.Constants.COMMUNITY: - this.resourceType = "community"; - break; - case org.dspace.core.Constants.BUNDLE: - this.resourceType = "bundle"; - break; - default: - 
this.resourceType = ""; - break; - } - } - - public Integer getId() { - return id; - } - - public void setId(Integer id) { - this.id = id; - } - - public Action getAction() { - return action; - } - - @JsonIgnore - public int getActionInt() { - switch (action) { - case READ: - return org.dspace.core.Constants.READ; - case WRITE: - return org.dspace.core.Constants.WRITE; - case DELETE: - return org.dspace.core.Constants.DELETE; - default: - return org.dspace.core.Constants.READ; - } - } - - public void setAction(Action action) { - this.action = action; - } - - public String getEpersonId() { - return epersonId; - } - - public void setEpersonId(String epersonId) { - this.epersonId = epersonId; - } - - public String getGroupId() { - return groupId; - } - - public void setGroupId(String groupId) { - this.groupId = groupId; - } - - public String getResourceId() { - return resourceId; - } - - public void setResourceId(String resourceId) { - this.resourceId = resourceId; - } - - public String getResourceType() { - return resourceType; - } - - public void setResourceType(String resourceType) { - this.resourceType = resourceType; - } - - public String getRpDescription() { - return rpDescription; - } - - public void setRpDescription(String rpDescription) { - this.rpDescription = rpDescription; - } - - public String getRpName() { - return rpName; - } - - public void setRpName(String rpName) { - this.rpName = rpName; - } - - public String getRpType() { - return rpType; - } - - public void setRpType(String rpType) { - this.rpType = rpType; - } - - public Date getStartDate() { - return startDate; - } - - public void setStartDate(Date startDate) { - this.startDate = startDate; - } - - public Date getEndDate() { - return endDate; - } - - public void setEndDate(Date endDate) { - this.endDate = endDate; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/common/Status.java b/dspace-rest/src/main/java/org/dspace/rest/common/Status.java deleted file mode 100644 index 
cdbb8210b947..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/common/Status.java +++ /dev/null @@ -1,111 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.common; - -import javax.xml.bind.annotation.XmlRootElement; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.dspace.app.util.Util; -import org.dspace.eperson.EPerson; - -/** - * Determine status of REST API - is it running, accessible and without errors?. - * Find out API version (DSpace major version) and DSpace source version. - * Find out your authentication status. - */ -@XmlRootElement(name = "status") -public class Status { - private boolean okay; - private boolean authenticated; - private String email; - private String fullname; - private String sourceVersion; - private String apiVersion; - - public Status() { - setOkay(true); - - setSourceVersion(Util.getSourceVersion()); - String[] version = Util.getSourceVersion().split("\\."); - setApiVersion(version[0]); // major version - - setAuthenticated(false); - } - - public Status(String email, String fullname) { - setOkay(true); - setAuthenticated(true); - setEmail(email); - setFullname(fullname); - } - - public Status(EPerson eperson) { - setOkay(true); - if (eperson != null) { - setAuthenticated(true); - setEmail(eperson.getEmail()); - setFullname(eperson.getFullName()); - } else { - setAuthenticated(false); - } - } - - @JsonProperty("okay") - public boolean isOkay() { - return this.okay; - } - - public void setOkay(boolean okay) { - this.okay = okay; - } - - @JsonProperty("authenticated") - public boolean isAuthenticated() { - return authenticated; - } - - public void setAuthenticated(boolean authenticated) { - this.authenticated = authenticated; - } - - @JsonProperty("email") - public String getEmail() { - return email; 
- } - - public void setEmail(String email) { - this.email = email; - } - - @JsonProperty("fullname") - public String getFullname() { - return fullname; - } - - public void setFullname(String fullname) { - this.fullname = fullname; - } - - @JsonProperty("sourceVersion") - public String getSourceVersion() { - return this.sourceVersion; - } - - public void setSourceVersion(String sourceVersion) { - this.sourceVersion = sourceVersion; - } - - @JsonProperty("apiVersion") - public String getApiVersion() { - return this.apiVersion; - } - - public void setApiVersion(String apiVersion) { - this.apiVersion = apiVersion; - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/exceptions/ContextException.java b/dspace-rest/src/main/java/org/dspace/rest/exceptions/ContextException.java deleted file mode 100644 index 817b662f73ac..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/exceptions/ContextException.java +++ /dev/null @@ -1,31 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.exceptions; - -/** - * Simple exception which only encapsulate classic exception. This exception is - * only for exceptions caused by creating context. 
- * - * @author Rostislav Novak (Computing and Information Centre, CTU in Prague) - */ -public class ContextException extends Exception { - - private static final long serialVersionUID = 1L; - - Exception causedBy; - - public ContextException(String message, Exception causedBy) { - super(message); - this.causedBy = causedBy; - } - - public Exception getCausedBy() { - return causedBy; - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefs.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefs.java deleted file mode 100644 index 0712ec546d73..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefs.java +++ /dev/null @@ -1,159 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import org.dspace.content.Item; -import org.dspace.core.Context; - -/** - * Define the set of use cases for filtering items of interest through the REST API. 
- * - * @author Terry Brady, Georgetown University - */ - -public class ItemFilterDefs implements ItemFilterList { - public static final String CAT_ITEM = "Item Property Filters"; - public static final String CAT_BASIC = "Basic Bitstream Filters"; - public static final String CAT_MIME = "Bitstream Filters by MIME Type"; - - public static final String[] MIMES_PDF = {"application/pdf"}; - public static final String[] MIMES_JPG = {"image/jpeg"}; - - - private enum EnumItemFilterDefs implements ItemFilterTest { - is_item("Is Item - always true", null, CAT_ITEM) { - public boolean testItem(Context context, Item item) { - return true; - } - }, - is_withdrawn("Withdrawn Items", null, CAT_ITEM) { - public boolean testItem(Context context, Item item) { - return item.isWithdrawn(); - } - }, - is_not_withdrawn("Available Items - Not Withdrawn", null, CAT_ITEM) { - public boolean testItem(Context context, Item item) { - return !item.isWithdrawn(); - } - }, - is_discoverable("Discoverable Items - Not Private", null, CAT_ITEM) { - public boolean testItem(Context context, Item item) { - return item.isDiscoverable(); - } - }, - is_not_discoverable("Not Discoverable - Private Item", null, CAT_ITEM) { - public boolean testItem(Context context, Item item) { - return !item.isDiscoverable(); - } - }, - has_multiple_originals("Item has Multiple Original Bitstreams", null, CAT_BASIC) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countOriginalBitstream(item) > 1; - } - }, - has_no_originals("Item has No Original Bitstreams", null, CAT_BASIC) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countOriginalBitstream(item) == 0; - } - }, - has_one_original("Item has One Original Bitstream", null, CAT_BASIC) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countOriginalBitstream(item) == 1; - } - }, - has_doc_original("Item has a Doc Original Bitstream (PDF, Office, Text, HTML, XML, etc)", 
null, CAT_MIME) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getDocumentMimeTypes()) > 0; - } - }, - has_image_original("Item has an Image Original Bitstream", null, CAT_MIME) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countOriginalBitstreamMimeStartsWith(context, item, "image") > 0; - } - }, - has_unsupp_type("Has Other Bitstream Types (not Doc or Image)", null, ItemFilterDefs.CAT_MIME) { - public boolean testItem(Context context, Item item) { - int bitCount = ItemFilterUtil.countOriginalBitstream(item); - if (bitCount == 0) { - return false; - } - int docCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getDocumentMimeTypes()); - int imgCount = ItemFilterUtil.countOriginalBitstreamMimeStartsWith(context, item, "image"); - return (bitCount - docCount - imgCount) > 0; - } - }, - has_mixed_original("Item has multiple types of Original Bitstreams (Doc, Image, Other)", null, CAT_MIME) { - public boolean testItem(Context context, Item item) { - int countBit = ItemFilterUtil.countOriginalBitstream(item); - if (countBit <= 1) { - return false; - } - int countDoc = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getDocumentMimeTypes()); - if (countDoc > 0) { - return countDoc != countBit; - } - int countImg = ItemFilterUtil.countOriginalBitstreamMimeStartsWith(context, item, "image"); - if (countImg > 0) { - return countImg != countBit; - } - return false; - } - }, - has_pdf_original("Item has a PDF Original Bitstream", null, CAT_MIME) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countOriginalBitstreamMime(context, item, MIMES_PDF) > 0; - } - }, - has_jpg_original("Item has JPG Original Bitstream", null, CAT_MIME) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countOriginalBitstreamMime(context, item, 
MIMES_JPG) > 0; - } - },; - - private String title = null; - private String description = null; - - private EnumItemFilterDefs(String title, String description, String category) { - this.title = title; - this.description = description; - this.category = category; - } - - private EnumItemFilterDefs() { - this(null, null, null); - } - - public String getName() { - return name(); - } - - public String getTitle() { - return title; - } - - public String getDescription() { - return description; - } - - private String category = null; - - public String getCategory() { - return category; - } - } - - public ItemFilterDefs() { - } - - public ItemFilterTest[] getFilters() { - return EnumItemFilterDefs.values(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsMeta.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsMeta.java deleted file mode 100644 index 96a866357d3c..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsMeta.java +++ /dev/null @@ -1,177 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import java.util.regex.Pattern; - -import org.apache.logging.log4j.Logger; -import org.dspace.content.Item; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.services.factory.DSpaceServicesFactory; - -/** - * Define the set of use cases for filtering items of interest through the REST API. 
- * - * @author Terry Brady, Georgetown University - */ - -public class ItemFilterDefsMeta implements ItemFilterList { - protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - static Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilterDefsMeta.class); - - public static final String CAT_META_GEN = "General Metadata Filters"; - public static final String CAT_META_SPEC = "Specific Metadata Filters"; - public static final String CAT_MOD = "Recently Modified"; - - private enum EnumItemFilterDefs implements ItemFilterTest { - has_no_title("Has no dc.title", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - return itemService.getMetadataByMetadataString(item, "dc.title").size() == 0; - } - }, - has_no_uri("Has no dc.identifier.uri", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - return itemService.getMetadataByMetadataString(item, "dc.identifier.uri").size() == 0; - } - }, - has_mult_uri("Has multiple dc.identifier.uri", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - return itemService.getMetadataByMetadataString(item, "dc.identifier.uri").size() > 1; - } - }, - has_compound_subject("Has compound subject", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-compound-subject"); - return ItemFilterUtil.hasMetadataMatch(item, "dc.subject.*", Pattern.compile(regex)); - } - }, - has_compound_author("Has compound author", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-compound-author"); - return ItemFilterUtil - .hasMetadataMatch(item, "dc.creator,dc.contributor.author", Pattern.compile(regex)); - } - }, - has_empty_metadata("Has empty 
metadata", null, CAT_META_GEN) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.hasMetadataMatch(item, "*", Pattern.compile("^\\s*$")); - } - }, - has_unbreaking_metadata("Has unbreaking metadata", null, CAT_META_GEN) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-unbreaking"); - return ItemFilterUtil.hasMetadataMatch(item, "*", Pattern.compile(regex)); - } - }, - has_long_metadata("Has long metadata field", null, CAT_META_GEN) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-long"); - return ItemFilterUtil.hasMetadataMatch(item, "*", Pattern.compile(regex)); - } - }, - has_xml_entity("Has XML entity in metadata", null, CAT_META_GEN) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-xml-entity"); - return ItemFilterUtil.hasMetadataMatch(item, "*", Pattern.compile(regex)); - } - }, - has_non_ascii("Has non-ascii in metadata", null, CAT_META_GEN) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-non-ascii"); - return ItemFilterUtil.hasMetadataMatch(item, "*", Pattern.compile(regex)); - } - }, - has_desc_url("Has url in description", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-url"); - return ItemFilterUtil.hasMetadataMatch(item, "dc.description.*", Pattern.compile(regex)); - } - }, - has_fulltext_provenance("Has fulltext in provenance", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item 
item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-fulltext"); - return ItemFilterUtil.hasMetadataMatch(item, "dc.description.provenance", Pattern.compile(regex)); - } - }, - no_fulltext_provenance("Doesn't have fulltext in provenance", null, CAT_META_SPEC) { - public boolean testItem(Context context, Item item) { - String regex = DSpaceServicesFactory.getInstance().getConfigurationService() - .getProperty("rest.report-regex-fulltext"); - return !ItemFilterUtil.hasMetadataMatch(item, "dc.description.provenance", Pattern.compile(regex)); - } - }, - mod_last_day("Modified in last 1 day", null, CAT_MOD) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.recentlyModified(item, 1); - } - }, - mod_last_7_days("Modified in last 7 days", null, CAT_MOD) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.recentlyModified(item, 7); - } - }, - mod_last_30_days("Modified in last 30 days", null, CAT_MOD) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.recentlyModified(item, 30); - } - }, - mod_last_90_days("Modified in last 60 days", null, CAT_MOD) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.recentlyModified(item, 60); - } - },; - - private String title = null; - private String description = null; - - private EnumItemFilterDefs(String title, String description, String category) { - this.title = title; - this.description = description; - this.category = category; - } - - private EnumItemFilterDefs() { - this(null, null, null); - } - - public String getName() { - return name(); - } - - public String getTitle() { - return title; - } - - public String getDescription() { - return description; - } - - private String category = null; - - public String getCategory() { - return category; - } - } - - public ItemFilterDefsMeta() { - } - - public ItemFilterTest[] getFilters() { - return 
EnumItemFilterDefs.values(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsMisc.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsMisc.java deleted file mode 100644 index 5b5cc4b12d37..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsMisc.java +++ /dev/null @@ -1,206 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import java.util.List; - -import org.dspace.content.Item; -import org.dspace.core.Context; -import org.dspace.rest.filter.ItemFilterUtil.BundleName; -import org.dspace.services.factory.DSpaceServicesFactory; - -/** - * Define the set of use cases for filtering items of interest through the REST API. - * - * @author Terry Brady, Georgetown University - */ - -public class ItemFilterDefsMisc implements ItemFilterList { - public static final String CAT_MISC = "Bitstream Bundle Filters"; - public static final String CAT_MIME_SUPP = "Supported MIME Type Filters"; - - private enum EnumItemFilterDefs implements ItemFilterTest { - has_only_supp_image_type("Item Image Bitstreams are Supported", null, CAT_MIME_SUPP) { - public boolean testItem(Context context, Item item) { - int imageCount = ItemFilterUtil.countOriginalBitstreamMimeStartsWith(context, item, "image/"); - if (imageCount == 0) { - return false; - } - int suppImageCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getSupportedImageMimeTypes()); - return (imageCount == suppImageCount); - } - }, - has_unsupp_image_type("Item has Image Bitstream that is Unsupported", null, CAT_MIME_SUPP) { - public boolean testItem(Context context, Item item) { - int imageCount = ItemFilterUtil.countOriginalBitstreamMimeStartsWith(context, item, "image/"); - if (imageCount == 0) { - 
return false; - } - int suppImageCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getSupportedImageMimeTypes()); - return (imageCount - suppImageCount) > 0; - } - }, - has_only_supp_doc_type("Item Document Bitstreams are Supported", null, CAT_MIME_SUPP) { - public boolean testItem(Context context, Item item) { - int docCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getDocumentMimeTypes()); - if (docCount == 0) { - return false; - } - int suppDocCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getSupportedDocumentMimeTypes()); - return docCount == suppDocCount; - } - }, - has_unsupp_doc_type("Item has Document Bitstream that is Unsupported", null, CAT_MIME_SUPP) { - public boolean testItem(Context context, Item item) { - int docCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getDocumentMimeTypes()); - if (docCount == 0) { - return false; - } - int suppDocCount = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getSupportedDocumentMimeTypes()); - return (docCount - suppDocCount) > 0; - } - }, - has_small_pdf("Has unusually small PDF", null, ItemFilterDefs.CAT_MIME) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil - .countBitstreamSmallerThanMinSize(context, BundleName.ORIGINAL, item, ItemFilterDefs.MIMES_PDF, - "rest.report-pdf-min-size") > 0; - } - }, - has_large_pdf("Has unusually large PDF", null, ItemFilterDefs.CAT_MIME) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil - .countBitstreamLargerThanMaxSize(context, BundleName.ORIGINAL, item, ItemFilterDefs.MIMES_PDF, - "rest.report-pdf-max-size") > 0; - } - }, - has_unsupported_bundle("Has bitstream in an unsuppored bundle", null, CAT_MISC) { - public boolean testItem(Context context, Item item) { - String[] bundleList = DSpaceServicesFactory.getInstance().getConfigurationService() - 
.getArrayProperty("rest.report-supp-bundles"); - return ItemFilterUtil.hasUnsupportedBundle(item, bundleList); - } - }, - has_small_thumbnail("Has unusually small thumbnail", null, CAT_MISC) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil - .countBitstreamSmallerThanMinSize(context, BundleName.THUMBNAIL, item, ItemFilterDefs.MIMES_JPG, - "rest.report-thumbnail-min-size") > 0; - } - }, - has_doc_without_text("Has document bitstream without TEXT item", null, ItemFilterDefs.CAT_MIME) { - public boolean testItem(Context context, Item item) { - int countDoc = ItemFilterUtil - .countOriginalBitstreamMime(context, item, ItemFilterUtil.getDocumentMimeTypes()); - if (countDoc == 0) { - return false; - } - int countText = ItemFilterUtil.countBitstream(BundleName.TEXT, item); - return countDoc > countText; - } - }, - has_original_without_thumbnail("Has original bitstream without thumbnail", null, CAT_MISC) { - public boolean testItem(Context context, Item item) { - int countBit = ItemFilterUtil.countOriginalBitstream(item); - if (countBit == 0) { - return false; - } - int countThumb = ItemFilterUtil.countBitstream(BundleName.THUMBNAIL, item); - return countBit > countThumb; - } - }, - has_invalid_thumbnail_name("Has invalid thumbnail name (assumes one thumbnail for each original)", null, - CAT_MISC) { - public boolean testItem(Context context, Item item) { - List originalNames = ItemFilterUtil.getBitstreamNames(BundleName.ORIGINAL, item); - List thumbNames = ItemFilterUtil.getBitstreamNames(BundleName.THUMBNAIL, item); - if (thumbNames.size() != originalNames.size()) { - return false; - } - for (String name : originalNames) { - if (!thumbNames.contains(name + ".jpg")) { - return true; - } - } - return false; - } - }, - has_non_generated_thumb("Has non generated thumbnail", null, CAT_MISC) { - public boolean testItem(Context context, Item item) { - String[] generatedThumbDesc = DSpaceServicesFactory.getInstance().getConfigurationService() - 
.getArrayProperty("rest.report-gen-thumbnail-desc"); - int countThumb = ItemFilterUtil.countBitstream(BundleName.THUMBNAIL, item); - if (countThumb == 0) { - return false; - } - int countGen = ItemFilterUtil.countBitstreamByDesc(BundleName.THUMBNAIL, item, generatedThumbDesc); - return (countThumb > countGen); - } - }, - no_license("Doesn't have a license", null, CAT_MISC) { - public boolean testItem(Context context, Item item) { - return ItemFilterUtil.countBitstream(BundleName.LICENSE, item) == 0; - } - }, - has_license_documentation("Has documentation in the license bundle", null, CAT_MISC) { - public boolean testItem(Context context, Item item) { - List names = ItemFilterUtil.getBitstreamNames(BundleName.LICENSE, item); - for (String name : names) { - if (!name.equals("license.txt")) { - return true; - } - } - return false; - } - },; - - private String title = null; - private String description = null; - - private EnumItemFilterDefs(String title, String description, String category) { - this.title = title; - this.description = description; - this.category = category; - } - - private EnumItemFilterDefs() { - this(null, null, null); - } - - public String getName() { - return name(); - } - - public String getTitle() { - return title; - } - - public String getDescription() { - return description; - } - - private String category = null; - - public String getCategory() { - return category; - } - } - - public ItemFilterDefsMisc() { - } - - public ItemFilterTest[] getFilters() { - return EnumItemFilterDefs.values(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsPerm.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsPerm.java deleted file mode 100644 index 9e80f31196b4..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterDefsPerm.java +++ /dev/null @@ -1,138 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at 
the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import java.sql.SQLException; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.content.Bitstream; -import org.dspace.content.Bundle; -import org.dspace.content.Item; -import org.dspace.core.Context; -import org.dspace.rest.filter.ItemFilterUtil.BundleName; - -/** - * Define the set of use cases for filtering items of interest through the REST API. - * - * @author Terry Brady, Georgetown University - */ -public class ItemFilterDefsPerm implements ItemFilterList { - protected static AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - public static final String CAT_PERM = "Perimission Filters"; - private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilterDefsPerm.class); - - public ItemFilterDefsPerm() { - } - - public enum EnumItemFilterPermissionDefs implements ItemFilterTest { - has_restricted_original("Item has Restricted Original Bitstream", - "Item has at least one original bitstream that is not accessible to Anonymous user", - CAT_PERM) { - public boolean testItem(Context context, Item item) { - try { - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(BundleName.ORIGINAL.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - if (!authorizeService - .authorizeActionBoolean(getAnonContext(), bit, org.dspace.core.Constants.READ)) { - return true; - } - } - } - } catch (SQLException e) { - ItemFilterDefsPerm.log.warn("SQL Exception testing original bitstream access " + e.getMessage(), e); - } - return false; - } - }, - has_restricted_thumbnail("Item has Restricted Thumbnail", - "Item has at least one thumbnail that is not accessible to Anonymous user", CAT_PERM) { - public boolean 
testItem(Context context, Item item) { - try { - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(BundleName.THUMBNAIL.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - if (!authorizeService - .authorizeActionBoolean(getAnonContext(), bit, org.dspace.core.Constants.READ)) { - return true; - } - } - } - } catch (SQLException e) { - ItemFilterDefsPerm.log - .warn("SQL Exception testing thumbnail bitstream access " + e.getMessage(), e); - } - return false; - } - }, - has_restricted_metadata("Item has Restricted Metadata", - "Item has metadata that is not accessible to Anonymous user", CAT_PERM) { - public boolean testItem(Context context, Item item) { - try { - return !authorizeService - .authorizeActionBoolean(getAnonContext(), item, org.dspace.core.Constants.READ); - } catch (SQLException e) { - ItemFilterDefsPerm.log.warn("SQL Exception testing item metadata access " + e.getMessage(), e); - return false; - } - } - },; - - private static Context anonContext; - - private static Context getAnonContext() { - if (anonContext == null) { - anonContext = new Context(); - } - return anonContext; - } - - - private String title = null; - private String description = null; - - private EnumItemFilterPermissionDefs(String title, String description, String category) { - this.title = title; - this.description = description; - this.category = category; - } - - private EnumItemFilterPermissionDefs() { - this(null, null, null); - } - - public String getName() { - return name(); - } - - public String getTitle() { - return title; - } - - public String getDescription() { - return description; - } - - private String category = null; - - public String getCategory() { - return category; - } - } - - @Override - public ItemFilterTest[] getFilters() { - return EnumItemFilterPermissionDefs.values(); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterList.java 
b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterList.java deleted file mode 100644 index f6590e36f8e2..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterList.java +++ /dev/null @@ -1,12 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -public interface ItemFilterList { - public ItemFilterTest[] getFilters(); -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterSet.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterSet.java deleted file mode 100644 index f70bc9664df1..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterSet.java +++ /dev/null @@ -1,143 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import javax.servlet.ServletContext; -import javax.ws.rs.WebApplicationException; - -import org.apache.logging.log4j.Logger; -import org.dspace.authorize.factory.AuthorizeServiceFactory; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; -import org.dspace.rest.common.Item; -import org.dspace.rest.common.ItemFilter; - -/** - * The set of Item Filter Use Cases to apply to a collection of items. 
- * - * @author Terry Brady, Georgetown University - */ -public class ItemFilterSet { - protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); - static Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilterSet.class); - - private List itemFilters; - private ItemFilter allFiltersFilter; - - /** - * Construct a set of Item Filters identified by a list string. - * - * @param filterList Comma separated list of filter names to include. - * Use {@link org.dspace.rest.common.ItemFilter#ALL} to retrieve all filters. - * @param reportItems If true, return item details. If false, return only counts of items. - */ - public ItemFilterSet(String filterList, boolean reportItems) { - log.debug(String.format("Create ItemFilterSet: %s", filterList)); - itemFilters = ItemFilter.getItemFilters(filterList, reportItems); - allFiltersFilter = ItemFilter.getAllFiltersFilter(itemFilters); - } - - /** - * Get the special filter that represents the intersection of all items in the Item Filter Set. - * - * @return the special Item Filter that contains items that satisfied every other Item Filter in the Item Filter Set - */ - public ItemFilter getAllFiltersFilter() { - return allFiltersFilter; - } - - /** - * Evaluate an item against the use cases in the Item Filter Set. - * - * If an item satisfies all items in the Item Filter Set, it should also ve added to the special all items filter. 
- * - * @param context Active DSpace Context - * @param item DSpace Object to evaluate - * @param restItem REST representation of the DSpace Object being evaluated - */ - public void testItem(Context context, org.dspace.content.Item item, Item restItem) { - boolean bAllTrue = true; - for (ItemFilter itemFilter : itemFilters) { - if (itemFilter.hasItemTest()) { - bAllTrue &= itemFilter.testItem(context, item, restItem); - } - } - if (bAllTrue && allFiltersFilter != null) { - allFiltersFilter.addItem(restItem); - } - } - - /** - * Get all of the Item Filters initialized into the Item Filter Set - * - * @return a list of Item Filters initialized into the Item Filter Set - */ - public List getItemFilters() { - return itemFilters; - } - - /** - * Evaluate a set of Items against the Item Filters in the Item Filter Set - * Current DSpace Context - * - * @param context Current DSpace Context - * @param servletContext Context of the servlet container. - * @param childItems Collection of Items to Evaluate - * @param save If true, save the details of each item that is evaluated - * @param expand List of item details to include in the results - * @return The number of items evaluated - * @throws WebApplicationException Runtime exception for applications. - * @throws SQLException An exception that provides information on a database access error or other - * errors. - */ - public int processSaveItems(Context context, ServletContext servletContext, - Iterator childItems, boolean save, String expand) - throws WebApplicationException, SQLException { - return processSaveItems(context, servletContext, childItems, new ArrayList(), save, expand); - } - - /** - * Evaluate a set of Items against the Item Filters in the Item Filter Set - * - * @param context Current DSpace Context - * @param servletContext Context of the servlet container. 
- * @param childItems Collection of Items to Evaluate - * @param items List of items to contain saved results - * @param save If true, save the details of each item that is evaluated - * @param expand List of item details to include in the results - * @return The number of items evaluated - * @throws WebApplicationException Runtime exception for applications. - * @throws SQLException An exception that provides information on a database access error or other - * errors. - */ - public int processSaveItems(Context context, ServletContext servletContext, - Iterator childItems, List items, boolean save, - String expand) throws WebApplicationException, SQLException { - int count = 0; - while (childItems.hasNext()) { - count++; - org.dspace.content.Item item = childItems.next(); - log.debug(item.getHandle() + " evaluate."); - if (authorizeService.authorizeActionBoolean(context, item, org.dspace.core.Constants.READ)) { - Item restItem = new Item(item, servletContext, expand, context); - if (save) { - items.add(restItem); - } - testItem(context, item, restItem); - } else { - log.debug(item.getHandle() + " not authorized - not included in result set."); - } - } - return count; - } - -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterTest.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterTest.java deleted file mode 100644 index 4ef2998e1613..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterTest.java +++ /dev/null @@ -1,29 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import org.dspace.content.Item; -import org.dspace.core.Context; - -/** - * Item Filter Use Case Interface. - * Items will be evaluated against a set of filters. 
- * - * @author Terry Brady, Georgetown University - */ -public interface ItemFilterTest { - public String getName(); - - public String getTitle(); - - public String getDescription(); - - public String getCategory(); - - public boolean testItem(Context context, Item i); -} diff --git a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterUtil.java b/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterUtil.java deleted file mode 100644 index ddb75f0db800..000000000000 --- a/dspace-rest/src/main/java/org/dspace/rest/filter/ItemFilterUtil.java +++ /dev/null @@ -1,278 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.rest.filter; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; -import java.util.regex.Pattern; - -import com.ibm.icu.util.Calendar; -import org.apache.logging.log4j.Logger; -import org.dspace.content.Bitstream; -import org.dspace.content.Bundle; -import org.dspace.content.Item; -import org.dspace.content.MetadataValue; -import org.dspace.content.factory.ContentServiceFactory; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.services.factory.DSpaceServicesFactory; - -public class ItemFilterUtil { - protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService(); - static Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemFilterUtil.class); - - public enum BundleName { ORIGINAL, TEXT, LICENSE, THUMBNAIL } - - /** - * Default constructor - */ - private ItemFilterUtil() { } - - static String[] getDocumentMimeTypes() { - return DSpaceServicesFactory.getInstance().getConfigurationService() - .getArrayProperty("rest.report-mime-document"); - } - - static String[] getSupportedDocumentMimeTypes() { - return 
DSpaceServicesFactory.getInstance().getConfigurationService() - .getArrayProperty("rest.report-mime-document-supported"); - } - - static String[] getSupportedImageMimeTypes() { - return DSpaceServicesFactory.getInstance().getConfigurationService() - .getArrayProperty("rest.report-mime-document-image"); - } - - static int countOriginalBitstream(Item item) { - return countBitstream(BundleName.ORIGINAL, item); - } - - static int countBitstream(BundleName bundleName, Item item) { - int count = 0; - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - count += bundle.getBitstreams().size(); - } - - return count; - } - - static List getBitstreamNames(BundleName bundleName, Item item) { - ArrayList names = new ArrayList(); - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - names.add(bit.getName()); - } - } - return names; - } - - - static int countOriginalBitstreamMime(Context context, Item item, String[] mimeList) { - return countBitstreamMime(context, BundleName.ORIGINAL, item, mimeList); - } - - static int countBitstreamMime(Context context, BundleName bundleName, Item item, String[] mimeList) { - int count = 0; - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - for (String mime : mimeList) { - try { - if (bit.getFormat(context).getMIMEType().equals(mime.trim())) { - count++; - } - } catch (SQLException e) { - log.error("Get format error for bitstream " + bit.getName()); - } - } - } - } - return count; - } - - static int countBitstreamByDesc(BundleName bundleName, Item item, String[] descList) { - int count = 0; - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - for (String 
desc : descList) { - String bitDesc = bit.getDescription(); - if (bitDesc == null) { - continue; - } - if (bitDesc.equals(desc.trim())) { - count++; - } - } - } - } - return count; - } - - static int countBitstreamSmallerThanMinSize(Context context, BundleName bundleName, Item item, String[] mimeList, - String prop) { - long size = DSpaceServicesFactory.getInstance().getConfigurationService().getLongProperty(prop); - int count = 0; - try { - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - for (String mime : mimeList) { - if (bit.getFormat(context).getMIMEType().equals(mime.trim())) { - if (bit.getSizeBytes() < size) { - count++; - } - } - } - } - } - } catch (SQLException e) { - // ignore - } - return count; - } - - static int countBitstreamLargerThanMaxSize(Context context, BundleName bundleName, Item item, String[] mimeList, - String prop) { - long size = DSpaceServicesFactory.getInstance().getConfigurationService().getLongProperty(prop); - int count = 0; - try { - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - for (String mime : mimeList) { - if (bit.getFormat(context).getMIMEType().equals(mime.trim())) { - if (bit.getSizeBytes() > size) { - count++; - } - } - } - } - } - } catch (SQLException e) { - // ignore - } - return count; - } - - static int countOriginalBitstreamMimeStartsWith(Context context, Item item, String prefix) { - return countBitstreamMimeStartsWith(context, BundleName.ORIGINAL, item, prefix); - } - - static int countBitstreamMimeStartsWith(Context context, BundleName bundleName, Item item, String prefix) { - int count = 0; - try { - for (Bundle bundle : item.getBundles()) { - if (!bundle.getName().equals(bundleName.name())) { - continue; - } - for (Bitstream bit : bundle.getBitstreams()) { - if 
(bit.getFormat(context).getMIMEType().startsWith(prefix)) { - count++; - } - } - } - } catch (SQLException e) { - // ignore - } - return count; - } - - static boolean hasUnsupportedBundle(Item item, String[] bundleList) { - if (bundleList == null) { - return false; - } - ArrayList bundles = new ArrayList(); - for (String bundleName : bundleList) { - bundles.add(bundleName.trim()); - } - for (Bundle bundle : item.getBundles()) { - if (!bundles.contains(bundle.getName())) { - return true; - } - } - return false; - } - - static boolean hasOriginalBitstreamMime(Context context, Item item, String[] mimeList) { - return hasBitstreamMime(context, BundleName.ORIGINAL, item, mimeList); - } - - static boolean hasBitstreamMime(Context context, BundleName bundleName, Item item, String[] mimeList) { - return countBitstreamMime(context, bundleName, item, mimeList) > 0; - } - - static boolean hasMetadataMatch(Item item, String fieldList, Pattern regex) { - if (fieldList.equals("*")) { - for (MetadataValue md : itemService - .getMetadata(item, org.dspace.content.Item.ANY, org.dspace.content.Item.ANY, - org.dspace.content.Item.ANY, org.dspace.content.Item.ANY)) { - if (regex.matcher(md.getValue()).matches()) { - return true; - } - } - } else { - for (String field : fieldList.split(",")) { - for (MetadataValue md : itemService.getMetadataByMetadataString(item, field.trim())) { - if (regex.matcher(md.getValue()).matches()) { - return true; - } - } - } - } - - return false; - } - - static boolean hasOnlyMetadataMatch(Item item, String fieldList, Pattern regex) { - boolean matches = false; - if (fieldList.equals("*")) { - for (MetadataValue md : itemService - .getMetadata(item, org.dspace.content.Item.ANY, org.dspace.content.Item.ANY, - org.dspace.content.Item.ANY, org.dspace.content.Item.ANY)) { - if (regex.matcher(md.getValue()).matches()) { - matches = true; - } else { - return false; - } - } - } else { - for (String field : fieldList.split(",")) { - for (MetadataValue md : 
itemService.getMetadataByMetadataString(item, field.trim())) { - if (regex.matcher(md.getValue()).matches()) { - matches = true; - } else { - return false; - } - } - } - } - return matches; - } - - static boolean recentlyModified(Item item, int days) { - Calendar cal = Calendar.getInstance(); - cal.add(Calendar.DATE, -days); - return cal.getTime().before(item.getLastModified()); - } -} diff --git a/dspace-rest/src/main/java/org/dspace/utils/DSpaceWebapp.java b/dspace-rest/src/main/java/org/dspace/utils/DSpaceWebapp.java deleted file mode 100644 index 5d3ce8bfa8bb..000000000000 --- a/dspace-rest/src/main/java/org/dspace/utils/DSpaceWebapp.java +++ /dev/null @@ -1,28 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ - -package org.dspace.utils; - -import org.dspace.app.util.AbstractDSpaceWebapp; - -/** - * An MBean to identify this web application. 
- * - * @author Bram Luyten (bram at atmire dot com) - */ -public class DSpaceWebapp - extends AbstractDSpaceWebapp { - public DSpaceWebapp() { - super("REST"); - } - - @Override - public boolean isUI() { - return false; - } -} diff --git a/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml b/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml deleted file mode 100644 index ec892fbaa4f1..000000000000 --- a/dspace-rest/src/main/webapp/WEB-INF/applicationContext.xml +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-rest/src/main/webapp/WEB-INF/security-applicationContext.xml b/dspace-rest/src/main/webapp/WEB-INF/security-applicationContext.xml deleted file mode 100644 index 677753d7f0c0..000000000000 --- a/dspace-rest/src/main/webapp/WEB-INF/security-applicationContext.xml +++ /dev/null @@ -1,80 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-rest/src/main/webapp/WEB-INF/web.xml b/dspace-rest/src/main/webapp/WEB-INF/web.xml deleted file mode 100644 index 34d74d9630ba..000000000000 --- a/dspace-rest/src/main/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - dspace.request - org.dspace.utils.servlet.DSpaceWebappServletFilter - - - - dspace.request - /* - - - - - springSecurityFilterChain - org.springframework.web.filter.DelegatingFilterProxy - - - - springSecurityFilterChain - /* - - - - - DSpace REST API (Deprecated) - - org.glassfish.jersey.servlet.ServletContainer - - - javax.ws.rs.Application - org.dspace.rest.DSpaceRestApplication - - 1 - - - - DSpace REST API (Deprecated) - /* - - - - default - /static/* - - - - - - DSpace REST API (Deprecated) - /* - - - CONFIDENTIAL - - - - - - - The location of the DSpace home directory - - dspace.dir - ${dspace.dir} - - - - - The location of the Log4J configuration - - log4jConfiguration - ${dspace.dir}/config/log4j2.xml - - - - 
contextConfigLocation - - /WEB-INF/applicationContext.xml, - /WEB-INF/security-applicationContext.xml - - - - - org.dspace.app.util.DSpaceContextListener - - - - - org.dspace.servicemanager.servlet.DSpaceKernelServletContextListener - - - - - org.springframework.web.context.ContextLoaderListener - - - - - org.dspace.app.util.DSpaceWebappListener - - - - diff --git a/dspace-rest/src/main/webapp/static/reports/authenticate.html b/dspace-rest/src/main/webapp/static/reports/authenticate.html deleted file mode 100644 index 046ced425c70..000000000000 --- a/dspace-rest/src/main/webapp/static/reports/authenticate.html +++ /dev/null @@ -1,58 +0,0 @@ - - - - - - - - - - - - - - - Authenticate for the REST Report Tools - - -

    Login for an Authenticated Report View

    -
    This is intended for sites with Password Authentication Enabled
    - -
    -
    - - -
    -
    - - -
    -
    - - -
    -
    - - \ No newline at end of file diff --git a/dspace-rest/src/main/webapp/static/reports/index.html b/dspace-rest/src/main/webapp/static/reports/index.html deleted file mode 100644 index bc71b0417c74..000000000000 --- a/dspace-rest/src/main/webapp/static/reports/index.html +++ /dev/null @@ -1,77 +0,0 @@ - - - - - - - - - - - - - - - - DSpace REST QC Client - - -Query Tool - -
    -

    DSpace REST QC Client

    -
    -

    Filters

    -
    -
    - -
    -
    -
    -
    - -
    -
    -

    Collection Report

    - -

    Item Results

    -
    -

    Additional data to return

    -
    - -
    -
    -

    Bitstream data to return

    -
    -
    - -
    -

    Results

    -
    -

    - -
    - - - - Export will export one page of results -
    - - -
    -
    -
    -
    - - \ No newline at end of file diff --git a/dspace-rest/src/main/webapp/static/reports/query.html b/dspace-rest/src/main/webapp/static/reports/query.html deleted file mode 100644 index 5a7a79cb2063..000000000000 --- a/dspace-rest/src/main/webapp/static/reports/query.html +++ /dev/null @@ -1,105 +0,0 @@ - - - - - - - - - - - - - - - DSpace REST Query Client - - -Collection Filter - -
    -

    DSpace REST Query Client

    -
    -
    -

    Collection Selector

    -
    -
    -

    Metadata Field Queries

    -
    -
    - - -
    -
    -
    -
    - -
    -
    -

    Limit/Paginate Queries

    -
    -
    - - - -
    -
    - -
    -
    -

    Filters

    -
    -
    - -
    -
    -
    -
    - -
    -
    -

    Additional data to return

    -
    -
    -
    - -
    -
    -

    Bitstream data to return

    -
    -
    - -
    -

    Item Results

    -
    -

    - -
    - - - - Export will export one page of results, increase result limits as needed -
    - -
    -
    -
    -
    - - \ No newline at end of file diff --git a/dspace-rest/src/main/webapp/static/reports/restClient.css b/dspace-rest/src/main/webapp/static/reports/restClient.css deleted file mode 100644 index d81724ae6776..000000000000 --- a/dspace-rest/src/main/webapp/static/reports/restClient.css +++ /dev/null @@ -1,98 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -table {border-collapse: collapse;border-right:solid thin black;} -table td, table th {border: thin solid black; padding: 4px;} -tr.header {background-color: #EEEEEE;} -tr:hover td, tr:hover th {background-color: #DDDDDD;} -tr.even td {border-bottom: thin dotted black;} -tr.odd td {border-top: thin dotted black;} -td.even {background-color: #EEFFEE;} -td.head {background-color: #EEEEFF;} -td.num {text-align: right;} -td.link {text-decoration: underline; color: blue;} -td, th {width: 100px;} -td.error {color: red;background-color: yellow;} -td.title, th.title {width: 400px;} -td.mod, th.mod {width: 200px;} -#itemtable {width: 100%;} -#itemdiv {display: none;} -td.ititle, th.ititle {width: 600px;} -td.partial {color:red; font-style: italic;} - -button:disabled { - background-color:gray; -} -input:read-only { - background-color: gray; -} -div.metadata { - padding: 2px; - width: 880px; -} -#metadatadiv select, #metadatadiv input { - padding: 2px; - margin: 4px; -} -#metadatadiv fieldset { - margin: 6px 15px; - width: 850px; -} - -#metadatadiv label { - font-weight: bold; -} - -#itemtable td div:not(:first-child) { - border-top: thin solid gray; -} - -body { - min-height: 700px; - min-width: 700px; -} - -tr.header th { - vertical-align: bottom; -} - -a.partial::after { - content:" ?"; -} - -fieldset.catdiv { - border: thin solid black; - margin-bottom: 8px; -} - -fieldset.catdiv div { - width: 380px; - float: left; -} - -#collSel { - 
width: 90%; -} - -#filterdiv label { - font-weight: normal; -} - -.button { - background-color: #EEEEEE; -} - -.toobig::before { - content: "*"; -} -#exlimit { - font-style: italic; -} - -.red { - color: red; -} \ No newline at end of file diff --git a/dspace-rest/src/main/webapp/static/reports/restCollReport.js b/dspace-rest/src/main/webapp/static/reports/restCollReport.js deleted file mode 100644 index 1d1c04ae07e3..000000000000 --- a/dspace-rest/src/main/webapp/static/reports/restCollReport.js +++ /dev/null @@ -1,513 +0,0 @@ -/* - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -var CollReport = function() { - Report.call(this); - //If sortable.js is included, uncomment the following - //this.hasSorttable = function(){return true;} - this.getLangSuffix = function(){ - return "[en]"; - } - - //Indicate if Password Authentication is supported - //this.makeAuthLink = function(){return true;}; - //Indicate if Shibboleth Authentication is supported - //this.makeShibLink = function(){return true;}; - - this.COLL_LIMIT = 20; - this.TOOBIG = 10000; - this.loadId = 0; - this.THREADS =11; - this.THREADSP = 11; - this.ACCIDX_COLL = 1; - this.ACCIDX_ITEM = 2; - this.IACCIDX_META = 0; - this.IACCIDX_BIT = 1; - this.IACCIDX_ITEM = 2; - this.getDefaultParameters = function(){ - return { - "show_fields[]" : [], - "show_fields_bits[]" : [], - filters : "", - limit : this.COUNT_LIMIT, - offset : 0, - icollection : "", - ifilter : "", - }; - } - this.getCurrentParameters = function(){ - return { - "show_fields[]" : this.myMetadataFields.getShowFields(), - "show_fields_bits[]" : this.myBitstreamFields.getShowFieldsBits(), - filters : this.myFilters.getFilterList(), - limit : this.myReportParameters.getLimit(), - offset : this.myReportParameters.getOffset(), - icollection : $("#icollection").val(), - ifilter : 
$("#ifilter").val(), - }; - } - var self = this; - - this.init = function() { - this.baseInit(); - $("#icollection").val(self.myReportParameters.params.icollection); - $("#ifilter").val(self.myReportParameters.params.ifilter); - $("#itemResults").accordion({ - heightStyle: "content", - collapsible: true, - active: 2 - }); - } - - this.myAuth.callback = function(data) { - self.createCollectionTable(); - $(".showCollections").bind("click", function(){ - self.loadData(); - }); - $("#refresh-fields,#refresh-fields-bits").bind("click", function(){ - self.drawItemTable($("#icollection").val(), $("#ifilter").val(), 0); - }); - } - - this.createCollectionTable = function() { - var self = this; - var tbl = $(""); - tbl.attr("id","table"); - $("#report").replaceWith(tbl); - - var thead = $(""); - tbl.append(thead); - var tbody = $(""); - tbl.append(tbody); - var tr = self.myHtmlUtil.addTr(thead).addClass("header"); - self.myHtmlUtil.addTh(tr, "Num").addClass("num").addClass("sorttable_numeric"); - self.myHtmlUtil.addTh(tr, "Community").addClass("title"); - self.myHtmlUtil.addTh(tr, "Collection").addClass("title"); - var thn = self.myHtmlUtil.addTh(tr, "Num Items").addClass("sorttable_numeric"); - self.myHtmlUtil.makeTotalCol(thn); - thn = self.myHtmlUtil.addTh(tr, "Num Filtered").addClass("sorttable_numeric"); - self.myHtmlUtil.makeTotalCol(thn); - - self.addCollections(); - } - - this.addCollections = function() { - var self = this; - - $.ajax({ - url: "/rest/hierarchy", - dataType: "json", - headers: self.myAuth.getHeaders(), - success: function(data){ - if (data.community != null) { - $.each(data.community, function(index, comm){ - self.addCommunity(comm, comm); - }); - } - self.setCollectionCounts(0); - }, - error: function(xhr, status, errorThrown) { - alert("Error in /rest/hierarchy "+ status+ " " + errorThrown); - } - }); - }; - - this.addCommunity = function(top, comm) { - var self = this; - - if (comm.collection != null) { - $.each(comm.collection, function(index, 
coll){ - self.addCollection(top, coll); - }); - } - if (comm.community != null) { - $.each(comm.community, function(index, scomm){ - self.addCommunity(top, scomm); - }); - } - }; - - this.addCollection = function(top, coll) { - var self = this; - - var tbody = $("#table tbody"); - var index = tbody.find("tr").length; - - var tr = self.myHtmlUtil.addTr(tbody); - tr.attr("cid", coll.id).attr("index",index).addClass(index % 2 == 0 ? "odd data" : "even data"); - self.myHtmlUtil.addTd(tr, index + 1).addClass("num"); - var parval = self.myHtmlUtil.getAnchor(top.name, self.ROOTPATH + top.handle); - - self.myHtmlUtil.addTd(tr, parval).addClass("title comm"); - self.myHtmlUtil.addTdAnchor(tr, coll.name, self.ROOTPATH + coll.handle).addClass("title"); - var td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("link").addClass("numCount"); - td = self.myHtmlUtil.addTd(tr, "").addClass("num").addClass("numFiltered"); - }; - - - this.setCollectionCounts = function(offset) { - var self = this; - - $.ajax({ - url: "/rest/filtered-collections", - data: { - limit : self.COLL_LIMIT, - offset : offset - }, - dataType: "json", - headers: self.myAuth.getHeaders(), - success: function(data){ - $.each(data, function(index, coll){ - var id = self.getId(coll); - var tr = $("#table tbody").find("tr[cid="+id+"]"); - var td = tr.find("td.numCount"); - td.text(coll.numberItems); - td.on("click", function(){ - self.drawItemTable(self.getId(coll),'',0); - $("#icollection").val(self.getId(coll)); - $("#ifilter").val(""); - }); - }); - - //cannot assume data returned is full amount in case some items are restricted - //if (data.length == self.COLL_LIMIT) { - if (data.length > 0) { - self.setCollectionCounts(offset + self.COLL_LIMIT); - return; - } - self.myHtmlUtil.totalCol(3); - $("#table").addClass("sortable"); - - if (self.myFilters.getFilterList() != "") { - self.loadData(); - if ($("#icollection").val() != "") { - self.drawItemTable($("#icollection").val(), $("#ifilter").val(), 0); - } 
- } - }, - error: function(xhr, status, errorThrown) { - alert("Error in /rest/collections "+ status+ " " + errorThrown); - }, - complete: function(xhr, status) { - self.spinner.stop(); - $(".showCollections").attr("disabled", false); - } - }); - } - - this.loadData = function() { - self.spinner.spin($("h1")[0]); - $(".showCollections").attr("disabled", true); - $("#metadatadiv").accordion("option", "active", self.ACCIDX_COLL); - self.loadId++; - $("td.datacol,th.datacol").remove(); - $("#table tr.data").addClass("processing"); - self.myFilters.filterString = self.myFilters.getFilterList(); - self.doRow(0, self.THREADS, self.loadId); - } - - this.doRow = function(row, threads, curLoadId) { - if (self.loadId != curLoadId) return; - var tr = $("tr[index="+row+"]"); - if (!tr.is("*")){ - return; - } - - var cid = tr.attr("cid"); - $.ajax({ - url: "/rest/filtered-collections/"+cid, - data: { - limit : self.COUNT_LIMIT, - filters : self.myFilters.filterString, - }, - dataType: "json", - headers: self.myAuth.getHeaders(), - success: function(data) { - var numItems = data.numberItems; - var numItemsProcessed = data.numberItemsProcessed; - $.each(data.itemFilters, function(index, itemFilter){ - if (self.loadId != curLoadId) { - return; - } - var trh = $("#table tr.header"); - var filterName = itemFilter["filter-name"]; - var filterTitle = itemFilter.title == null ? 
filterName : itemFilter.title; - if (!trh.find("th."+filterName).is("*")) { - var th = self.myHtmlUtil.addTh(trh, filterTitle); - th.addClass(filterName).addClass("datacol").addClass("sorttable_numeric"); - self.myHtmlUtil.makeTotalCol(th); - - if (itemFilter.description != null) { - th.attr("title", itemFilter.description); - } - - $("tr.data").each(function(){ - var td = self.myHtmlUtil.addTd($(this), ""); - td.addClass(filterName).addClass("num").addClass("datacol"); - }); - } - - self.setCellCount(tr, cid, 0, (numItems != numItemsProcessed), itemFilter); - self.setFilteredCount(tr, cid, 0, numItems, numItemsProcessed); - }); - - tr.removeClass("processing"); - if (!$("#table tr.processing").is("*")) { - self.updateSortable(); - self.totalFilters(); - self.spinner.stop(); - $(".showCollections").attr("disabled", false); - return; - } - if (row % threads == 0 || threads == 1) { - for(var i=1; i<=threads; i++) { - self.doRow(row+i, threads, curLoadId); - } - } - }, - error: function(xhr, status, errorThrown) { - alert("Error in /rest/filtered-collections "+ status+ " " + errorThrown); - }, - complete: function(xhr, status) { - self.spinner.stop(); - $(".showCollections").attr("disabled", false); - } - }); - }; - - this.updateSortable = function() { - if (self.hasSorttable()) { - $("#table").removeClass("sortable"); - $("#table").addClass("sortable"); - sorttable.makeSortable($("#table")[0]); - } - } - - this.totalFilters = function() { - var colcount = $("#table tr th").length; - for(var i=4; i= self.TOOBIG) { - td.addClass("toobig"); - title+= "\nIt will take significant time to apply this filter to the entire collection." 
- } - td.attr("title", title); - return false; - } else { - self.totalFilters(); - } - return true; - } - - this.setCellCount = function(tr, cid, offset, isPartial, itemFilter) { - var filterName = itemFilter["filter-name"]; - var icount = itemFilter["item-count"]; - - var td = tr.find("td."+filterName); - if (icount == null) { - icount = 0; - } - var cur = parseInt(td.text()); - if (!isNaN(cur)) { - icount += cur; - } - - td.removeClass("partial"); - td.removeClass("link"); - td.removeAttr("title"); - td.off(); - td.text(icount); - if (icount != 0) { - td.addClass("link"); - if (isPartial) { - td.addClass("partial"); - td.attr("title", "Collection partially processed, item counts are incomplete"); - } - td.on("click", function(){ - self.drawItemTable(cid,filterName,0); - $("#icollection").val(cid); - $("#ifilter").val(filterName); - }); - } - } - - - this.drawItemTable = function(cid, filter, offset) { - self = this; - self.spinner.spin($("h1")[0]); - $("#itemtable").replaceWith($('
    ')); - var itbl = $("#itemtable"); - //itbl.find("tr").remove("*"); - var tr = self.myHtmlUtil.addTr(itbl).addClass("header"); - self.myHtmlUtil.addTh(tr, "Num").addClass("num").addClass("sorttable_numeric"); - self.myHtmlUtil.addTh(tr, "id"); - self.myHtmlUtil.addTh(tr, "Handle"); - self.myHtmlUtil.addTh(tr, "dc.title" + self.getLangSuffix()).addClass("title"); - var fields = $("#show-fields select").val(); - if (fields != null) { - $.each(fields, function(index, field){ - self.myHtmlUtil.addTh(tr, field + self.getLangSuffix()); - }); - } - var bitfields = $("#show-fields-bits select").val(); - if (bitfields != null) { - $.each(bitfields, function(index, bitf){ - self.myHtmlUtil.addTh(tr, bitf); - }); - } - - var expand = "items"; - if (fields != null) { - expand += ",metadata"; - } - if (bitfields != null) { - expand += ",bitstreams"; - } - - var params = { - expand: expand, - limit: self.ITEM_LIMIT, - filters: filter, - offset: offset, - "show_fields[]" : fields, - "show_fields_bits[]" : bitfields, - } - - $.ajax({ - url: "/rest/filtered-collections/"+cid, - data: params, - dataType: "json", - headers: self.myAuth.getHeaders(), - success: function(data){ - var source = filter == "" ? data.items : data.itemFilters[0].items; - - $.each(source, function(index, item){ - var tr = self.myHtmlUtil.addTr(itbl); - tr.addClass(index % 2 == 0 ? "odd data" : "even data"); - self.myHtmlUtil.addTd(tr, offset+index+1).addClass("num"); - self.myHtmlUtil.addTd(tr, self.getId(item)); - self.myHtmlUtil.addTdAnchor(tr, item.handle, self.ROOTPATH + item.handle); - self.myHtmlUtil.addTd(tr, item.name).addClass("ititle"); - if (fields != null) { - $.each(fields, function(index, field){ - var text = ""; - var td = self.myHtmlUtil.addTd(tr, ""); - $.each(item.metadata, function(mindex,mv){ - if (mv.key == field) { - td.append($("
    "+mv.value+"
    ")); - } - }); - }); - } - if (bitfields != null) { - $.each(bitfields, function(index, bitfield){ - var td = self.myHtmlUtil.addTd(tr, ""); - var fieldtext = self.myBitstreamFields.getKeyText(bitfield, item, bitfields); - for(var j=0; j"+fieldtext[j]+"")); - } - }); - } - }); - self.displayItems(filter + " Items in " + data.name, - offset, - self.ITEM_LIMIT, - data.numberItems, - function(){self.drawItemTable(cid, filter, (offset - self.ITEM_LIMIT < 0) ? 0 : offset - self.ITEM_LIMIT);}, - function(){self.drawItemTable(cid, filter, offset + self.ITEM_LIMIT);} - ); - - if (self.hasSorttable()){ - sorttable.makeSortable(itbl[0]); - } - $("#metadatadiv").accordion("option", "active", self.ACCIDX_ITEM); - }, - error: function(xhr, status, errorThrown) { - alert("Error in /rest/filtered-collections "+ status+ " " + errorThrown); - }, - complete: function(xhr, status) { - self.spinner.stop(); - $(".showCollections").attr("disabled", false); - $("#itemResults").accordion("option", "active", self.IACCIDX_ITEM); - } - }); - } - - //Ignore the first column containing a row number and the item handle - this.exportCol = function(colnum, col) { - var data = ""; - if (colnum == 0) return ""; - if (colnum == 2) return ""; - data += (colnum == 1) ? 
"" : ","; - data += self.exportCell(col); - return data; - } -} -CollReport.prototype = Object.create(Report.prototype); - -$(document).ready(function(){ - var myReport=new CollReport(); - myReport.init(); -}); \ No newline at end of file diff --git a/dspace-rest/src/main/webapp/static/reports/restQueryReport.js b/dspace-rest/src/main/webapp/static/reports/restQueryReport.js deleted file mode 100644 index 9a8297fb6928..000000000000 --- a/dspace-rest/src/main/webapp/static/reports/restQueryReport.js +++ /dev/null @@ -1,351 +0,0 @@ -/* - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -var QueryReport = function() { - Report.call(this); - - //If sortable.js is included, uncomment the following - //this.hasSorttable = function(){return true;} - this.getLangSuffix = function(){ - return "[en]"; - } - - //Indicate if Password Authentication is supported - //this.makeAuthLink = function(){return true;}; - //Indicate if Shibboleth Authentication is supported - //this.makeShibLink = function(){return true;}; - - this.getDefaultParameters = function(){ - return { - "collSel[]" : [], - "query_field[]" : [], - "query_op[]" : [], - "query_val[]" : [], - "show_fields[]" : [], - "show_fields_bits[]" : [], - "filters" : "", - "limit" : this.ITEM_LIMIT, - "offset" : 0, - }; - } - this.getCurrentParameters = function(){ - var expand = "parentCollection,metadata"; - if (this.myBitstreamFields.hasBitstreamFields()) { - expand += ",bitstreams"; - } - var params = { - "query_field[]" : [], - "query_op[]" : [], - "query_val[]" : [], - "collSel[]" : ($("#collSel").val() == null) ? 
[""] : $("#collSel").val(), - limit : this.myReportParameters.getLimit(), - offset : this.myReportParameters.getOffset(), - "expand" : expand, - filters : this.myFilters.getFilterList(), - "show_fields[]" : this.myMetadataFields.getShowFields(), - "show_fields_bits[]" : this.myBitstreamFields.getShowFieldsBits(), - }; - $("select.query-tool,input.query-tool").each(function() { - var paramArr = params[$(this).attr("name")]; - paramArr[paramArr.length] = $(this).val(); - }); - return params; - } - var self = this; - - this.init = function() { - this.baseInit(); - var communitySelector = new CommunitySelector(this, $("#collSelector"), this.myReportParameters.params["collSel[]"]); - } - - this.initMetadataFields = function() { - this.myMetadataFields = new QueryableMetadataFields(self); - this.myMetadataFields.load(); - } - this.myAuth.callback = function(data) { - $(".query-button").click(function(){self.runQuery();}) - } - - this.runQuery = function() { - this.spinner.spin($("body")[0]); - $("button").attr("disabled", true); - $.ajax({ - url: "/rest/filtered-items", - data: this.getCurrentParameters(), - dataType: "json", - headers: self.myAuth.getHeaders(), - success: function(data){ - data.metadata = $("#show-fields select").val(); - data.bitfields = $("#show-fields-bits select").val(); - self.drawItemFilterTable(data); - self.spinner.stop(); - $("button").not("#next,#prev").attr("disabled", false); - }, - error: function(xhr, status, errorThrown) { - alert("Error in /rest/filtered-items "+ status+ " " + errorThrown); - }, - complete: function(xhr, status, errorThrown) { - self.spinner.stop(); - $("button").not("#next,#prev").attr("disabled", false); - } - }); - } - - this.drawItemFilterTable = function(data) { - $("#itemtable").replaceWith($('
    ')); - var itbl = $("#itemtable"); - var tr = self.myHtmlUtil.addTr(itbl).addClass("header"); - self.myHtmlUtil.addTh(tr, "Num").addClass("num").addClass("sorttable_numeric"); - self.myHtmlUtil.addTh(tr, "id"); - self.myHtmlUtil.addTh(tr, "collection"); - self.myHtmlUtil.addTh(tr, "Item Handle"); - self.myHtmlUtil.addTh(tr, "dc.title" + self.getLangSuffix()); - - var mdCols = []; - if (data.metadata) { - $.each(data.metadata, function(index, field) { - if (field != "") { - self.myHtmlUtil.addTh(tr,field + self.getLangSuffix()).addClass("returnFields"); - mdCols[mdCols.length] = field; - } - }); - } - - if (data.bitfields) { - $.each(data.bitfields, function(index, bitfield) { - if (bitfield != "") { - self.myHtmlUtil.addTh(tr,bitfield).addClass("returnFields"); - mdCols[mdCols.length] = bitfield; - } - }); - } - - $.each(data.items, function(index, item){ - var tr = self.myHtmlUtil.addTr(itbl); - tr.addClass(index % 2 == 0 ? "odd data" : "even data"); - self.myHtmlUtil.addTd(tr, self.myReportParameters.getOffset()+index+1).addClass("num"); - self.myHtmlUtil.addTd(tr, self.getId(item)); - if (item.parentCollection == null) { - self.myHtmlUtil.addTd(tr, "--"); - } else { - self.myHtmlUtil.addTdAnchor(tr, item.parentCollection.name, self.ROOTPATH + item.parentCollection.handle); - } - self.myHtmlUtil.addTdAnchor(tr, item.handle, self.ROOTPATH + item.handle); - self.myHtmlUtil.addTd(tr, item.name); - - for(var i=0; i"+metadata.value+""); - td.append(div); - } - } - }); - var fieldtext = self.myBitstreamFields.getKeyText(key, item, data.bitfields); - for(var j=0; j"+fieldtext[j]+"")) - } - } - }); - - this.displayItems(data["query-annotation"], - this.myReportParameters.getOffset(), - this.myReportParameters.getLimit(), - data["unfiltered-item-count"], - function(){ - self.myReportParameters.updateOffset(false); - self.runQuery(); - }, - function(){ - self.myReportParameters.updateOffset(true); - self.runQuery(); - } - ); - - if (this.hasSorttable()) { - 
sorttable.makeSortable(itbl[0]); - } - $("#metadatadiv").accordion("option", "active", $("#metadatadiv > h3").length - 1); - } - - //Ignore the first column containing a row number and the item handle, get handle for the collection - this.exportCol = function(colnum, col) { - var data = ""; - if (colnum == 0) return ""; - if (colnum == 3) return ""; - data += (colnum == 1) ? "" : ","; - - if (colnum == 2) { - var anchor = $(col).find("a"); - var href = anchor.is("a") ? anchor.attr("href").replace(self.ROOTPATH,"") : $(col).text(); - data += "\"" + href + "\""; - } else { - data += self.exportCell(col); } - return data; - } -} -QueryReport.prototype = Object.create(Report.prototype); - -$(document).ready(function(){ - var myReport=new QueryReport(); - myReport.init(); -}); - -var QueryableMetadataFields = function(report) { - MetadataFields.call(this, report); - var self = this; - - this.initFields = function(data, report) { - self.metadataSchemas = data; - var params = report.myReportParameters.params; - var fields = params["query_field[]"]; - var ops = params["query_op[]"]; - var vals = params["query_val[]"]; - if (fields && ops && vals) { - if (fields.length == 0) { - self.drawFilterQuery("*","exists",""); - } else { - for(var i=0; i i ? ops[i] : ""; - var val = vals.length > i ? 
vals[i] : ""; - self.drawFilterQuery(fields[i],op,val); - } - } - } - self.drawShowFields(params["show_fields[]"]); - self.initQueries(); - report.spinner.stop(); - $(".query-button").attr("disabled", false); - } - - this.initQueries = function() { - $("#predefselect") - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .append($("")) - .on("change",function(){ - $("div.metadata").remove(); - var val = $("#predefselect").val(); - if (val == 'new') { - self.drawFilterQuery("","",""); - } else if (val == 'q1') { - self.drawFilterQuery("dc.title","doesnt_exist",""); - } else if (val == 'q2') { - self.drawFilterQuery("dc.identifier.uri","doesnt_exist",""); - } else if (val == 'q3') { - self.drawFilterQuery("dc.subject.*","like","%;%"); - } else if (val == 'q4') { - self.drawFilterQuery("dc.contributor.author","like","% and %"); - } else if (val == 'q5') { - self.drawFilterQuery("dc.creator","like","% and %"); - } else if (val == 'q6') { - self.drawFilterQuery("dc.description","matches","^.*(http://|https://|mailto:).*$"); - } else if (val == 'q7') { - self.drawFilterQuery("dc.description.provenance","matches","^.*No\\. of bitstreams(.|\\r|\\n|\\r\\n)*\\.(PDF|pdf|DOC|doc|PPT|ppt|DOCX|docx|PPTX|pptx).*$"); - } else if (val == 'q8') { - self.drawFilterQuery("dc.description.provenance","doesnt_match","^.*No\\. of bitstreams(.|\\r|\\n|\\r\\n)*\\.(PDF|pdf|DOC|doc|PPT|ppt|DOCX|docx|PPTX|pptx).*$"); - } else if (val == 'q9') { - self.drawFilterQuery("*","matches","^\\s*$"); - } else if (val == 'q10') { - self.drawFilterQuery("dc.description.*","matches","^.*[^\\s]{50,}.*$"); - } else if (val == 'q12') { - self.drawFilterQuery("*","matches","^.*&#.*$"); - } else if (val == 'q13') { - self.drawFilterQuery("*","matches","^.*[^[:ascii:]].*$"); - } - }); - } - - this.drawFilterQuery = function(pField, pOp, pVal) { - var div = $("