diff --git a/.gitignore b/.gitignore
index 37db4a9d156..390a4a56ce0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ scripts/api/py_api_wrapper/local-data/*
doc/sphinx-guides/build
faces-config.NavData
src/main/java/BuildNumber.properties
+/nbproject/
\ No newline at end of file
diff --git a/Vagrantfile b/Vagrantfile
index 8ae1785d2b7..5df7800195f 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -15,6 +15,11 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
puts "OPERATING_SYSTEM environment variable not specified. Using #{operating_system} by default.\nTo specify it in bash: export OPERATING_SYSTEM=debian"
config.vm.box_url = "http://puppet-vagrant-boxes.puppetlabs.com/centos-65-x64-virtualbox-puppet.box"
config.vm.box = "puppet-vagrant-boxes.puppetlabs.com-centos-65-x64-virtualbox-puppet.box"
+ elsif ENV['OPERATING_SYSTEM'] == 'centos7'
+ puts "WARNING: CentOS 7 specified. Newer than what the dev team tests on."
+ config.vm.box_url = "https://atlas.hashicorp.com/puppetlabs/boxes/centos-7.2-64-puppet/versions/1.0.1/providers/virtualbox.box"
+ config.vm.box = "puppetlabs-centos-7.2-64-puppet-1.0.1-virtualbox.box"
+ standalone.vm.box = "puppetlabs-centos-7.2-64-puppet-1.0.1-virtualbox.box"
elsif ENV['OPERATING_SYSTEM'] == 'debian'
puts "WARNING: Debian specified. Here be dragons! https://github.com/IQSS/dataverse/issues/1059"
config.vm.box_url = "http://puppet-vagrant-boxes.puppetlabs.com/debian-73-x64-virtualbox-puppet.box"
diff --git a/conf/solr/4.6.0/schema.xml b/conf/solr/4.6.0/schema.xml
index 6fa5892858d..10f9b07be5c 100644
--- a/conf/solr/4.6.0/schema.xml
+++ b/conf/solr/4.6.0/schema.xml
@@ -249,6 +249,8 @@
+
+
diff --git a/JAVADOC_GUIDE.md b/doc/JAVADOC_GUIDE.md
similarity index 100%
rename from JAVADOC_GUIDE.md
rename to doc/JAVADOC_GUIDE.md
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibboleth2.xml b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibboleth2.xml
index 5b67396b2be..dc79aebde38 100644
--- a/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibboleth2.xml
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/shibboleth/shibboleth2.xml
@@ -9,6 +9,7 @@ https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPConfiguration
-->
@@ -54,6 +55,23 @@ https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPConfiguration
+
+
diff --git a/doc/sphinx-guides/source/_static/installation/files/home/rpmbuild/rpmbuild/RPMS/x86_64/rapache-1.2.7-rpm0.x86_64.rpm b/doc/sphinx-guides/source/_static/installation/files/home/rpmbuild/rpmbuild/RPMS/x86_64/rapache-1.2.7-rpm0.x86_64.rpm
new file mode 100644
index 00000000000..9ca6086c86a
Binary files /dev/null and b/doc/sphinx-guides/source/_static/installation/files/home/rpmbuild/rpmbuild/RPMS/x86_64/rapache-1.2.7-rpm0.x86_64.rpm differ
diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst
new file mode 100644
index 00000000000..3b7be95f790
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/harvestclients.rst
@@ -0,0 +1,37 @@
+Managing Harvesting Clients
+===========================
+
+.. contents:: :local:
+
+Your Dataverse as a Metadata Harvester
+--------------------------------------
+
+Harvesting is a process of exchanging metadata with other repositories. As a harvesting *client*, your Dataverse can
+gather metadata records from remote sources. These can be other Dataverse instances or other archives that support OAI-PMH, the standard harvesting protocol. Harvested metadata records will be indexed and made searchable by your users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation.
+
+Harvested records can be kept in sync with the original repository through scheduled incremental updates, daily or weekly.
+Alternatively, harvests can be run on demand by the Admin.
+
+Managing Harvesting Clients
+---------------------------
+
+To start harvesting metadata from a remote OAI repository, you first create and configure a *Harvesting Client*.
+
+Clients are managed on the "Harvesting Clients" page accessible via the Dashboard. Click on the *Add Client* button to get started.
+
+The process of creating a new client, or editing an existing one, is largely self-explanatory. It is split into logical steps that allow the user to go back and correct entries made earlier. The process is interactive, and guidance text is provided. For example, the user is required to enter the URL of the remote OAI server. When they click *Next*, the application will try to establish a connection to the server in order to verify that it is working and to obtain information about the sets of metadata records and the metadata formats it supports. The choices offered to the user on the next page will be based on this information. If the application fails to establish a connection to the remote archive at the address specified, or if an invalid response is received, the user is given an opportunity to check and correct the URL they entered.
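+
+If you would like to check a remote OAI server by hand, before or after adding it as a client, you can issue standard OAI-PMH requests with any HTTP client. A minimal sketch (the remote URL below is a hypothetical placeholder)::
+
+  curl "http://remote.example.edu/oai?verb=Identify"
+  curl "http://remote.example.edu/oai?verb=ListMetadataFormats"
+  curl "http://remote.example.edu/oai?verb=ListSets"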
+
+New in Dataverse 4, vs. DVN 3
+-----------------------------
+
+
+- Note that when creating a client you will need to select an existing local dataverse to host the datasets harvested. In DVN 3, a dedicated "harvesting dataverse" would be created specifically for each remote harvesting source. In Dataverse 4, harvested content can be added to *any dataverse*. This means that a dataverse can now contain datasets harvested from multiple sources and/or a mix of local and harvested datasets.
+
+
+- An extra "Archive Type" pull-down menu has been added to the Create and Edit dialogs. This setting, selected from choices such as "Dataverse 4", "DVN, v2-3", "Generic OAI", etc., is used to properly format the harvested metadata as it is shown in the search results. It is **very important** to select the type that best describes the remote server, as failure to do so can result in information missing from the search results and in a **failure to redirect the user to the archival source** of the data!
+
  It is, however, **very easy to correct** a mistake like this. For example, let's say you have created a client to harvest from the XYZ Institute and specified the archive type as "Dataverse 4". You have been able to harvest content, and the datasets appear in the search results, but clicking on them results in a "Page Not Found" error on the remote site. At that point you realize that the XYZ Institute admins have not yet upgraded to Dataverse 4 and are still running DVN v3.1.2. All you need to do is go back to the Harvesting Clients page and change the setting to "DVN, v2-3". This will fix the redirects **without having to re-harvest** the datasets.
+
+- Another extra entry, "Archive Description", is added to the *Edit Harvesting Client* dialog. This description appears at the bottom of each search result card for a harvested dataset or datafile. By default, this text reads "This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data." Here it can be customized to be more descriptive, for example, "This Dataset is harvested from our partners at the XYZ Institute..."
+
+
diff --git a/doc/sphinx-guides/source/admin/harvestserver.rst b/doc/sphinx-guides/source/admin/harvestserver.rst
new file mode 100644
index 00000000000..333ae27e925
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/harvestserver.rst
@@ -0,0 +1,130 @@
+Managing Harvesting Server and Sets
+===================================
+
+.. contents:: :local:
+
+Your Dataverse as an OAI server
+-------------------------------
+
+As a harvesting *server*, your Dataverse can make some of the local
+dataset metadata available to remote harvesting clients. These can be
+other Dataverse instances, or any other clients that support the
+OAI-PMH harvesting protocol. Note that the terms "Harvesting Server" and "OAI
+Server" are being used interchangeably throughout this guide and in
+the inline help text.
+
+How does it work?
+-----------------
+
+Only the published, unrestricted datasets in your Dataverse can
+be made harvestable. Remote clients normally keep their records in sync
+through scheduled incremental updates, daily or weekly, thus
+minimizing the load on your server. Note that it is only the metadata
+that are harvested. Remote harvesters will generally not attempt to
+download the data files associated with the harvested datasets.
+
+The harvesting server can be enabled or disabled on the "Harvesting
+Server" page accessible via the Dashboard. The harvesting server is
+disabled by default on a brand new, "out of the box" Dataverse.
+
+OAI Sets
+--------
+
+Once the service is enabled, you define collections of local datasets
+that will be available to remote harvesters as *OAI Sets*. Once again,
+the terms "OAI Set" and "Harvesting Set" are used
+interchangeably. Sets are defined by search queries. Any such query
+that finds any number of published, local (non-harvested) datasets can
+be used to create an OAI set. Sets can overlap local dataverses, and
+can include as few or as many of your local datasets as you wish. A
+good way to master the Dataverse search query language is to
+experiment with the Advanced Search page. We also recommend that you
+consult the Search API section of the Dataverse User Guide.
+
+Once you have entered the search query and clicked *Next*, the number
+of search results found will be shown on the next screen. This way, if
+you are seeing a number that's different from what you expected, you
+can go back and try to re-define the query.
+
+Some useful examples of search queries to define OAI sets:
+
+- A good way to create a set that includes all your local, published datasets is to search by the Unique Identifier authority registered to your Dataverse, for example:
+
+ ``dsPersistentId:"doi:1234/"``
+
+ Note that double quotes must be used, since the search field value contains the colon symbol!
+
+ Note also that the search terms limiting the results to published and local datasets **are added to the query automatically**, so you don't need to worry about that.
+
+- A query to create a set to include the datasets from a specific local dataverse:
+
+ ``parentId:NNN``
+
  where NNN is the database id of the dataverse object (consult the Dataverse table of the SQL database used by the application to verify the database id; a sketch of such a lookup appears after these examples).
+
+- A query to find all the datasets by a certain author:
+
+ ``authorName:YYY``
+
+ where YYY is the name.
+
+- Complex queries can be created with multiple logical AND and OR operators. For example,
+
+ ``(authorName:YYY OR authorName:ZZZ) AND dsPublicationDate:NNNN``
+
+- Some further query examples:
+
+ For specific datasets using a persistentID:
+
+ ``(dsPersistentId:10.5000/ZZYYXX/ OR dsPersistentId:10.5000/XXYYZZ)``
+
+ For all datasets within a specific ID authority:
+
+ ``dsPersistentId:10.5000/XXYYZZ``
+
+ For all dataverses with subjects of Astronomy and Astrophysics or Earth and Environmental Sciences:
+
+ ``(dvSubject:"Astronomy and Astrophysics" OR dvSubject:"Earth and Environmental Sciences")``
+
+ For all datasets containing the keyword "censorship":
+
+ ``keywordValue:censorship``
+
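+To look up the database id used in the ``parentId`` example above, you can query the database directly. A minimal sketch, assuming a PostgreSQL database named ``dvndb`` and a dataverse with the alias ``xyz`` (both names are placeholders for your own installation)::
+
+  psql dvndb -c "SELECT id, alias FROM dataverse WHERE alias = 'xyz';"
+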
+Important: New SOLR schema required!
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In order to be able to define OAI sets, your SOLR server must be upgraded with the search schema that came with Dataverse release 4.5 (or later), and all your local datasets must be re-indexed once the new schema is installed.
+
+OAI Set updates
+---------------
+
+Every time a new harvesting set is created, or changes are made to an
+existing set, the contents of the set are automatically updated - the
+Dataverse application will find the datasets defined by the query and
+attempt to run the metadata export on the ones that haven't been
+exported yet. Only the datasets for which the export has completed
+successfully, and the results have been cached on the filesystem, are
+included in the OAI sets advertised to the harvesting clients!
+
+This is in contrast to how sets were managed in DVN v.3, where sets
+had to be exported manually before any such changes took effect.
+
+**Important:** Note, however, that changes made to the actual dataset
+metadata do not immediately trigger an update of the corresponding
+OAI sets! For example, let's say you have created an OAI set defined by
+the search query ``authorName:king`` that resulted in 43
+dataset records. If a new dataset by the same author is added and published, this **does not** immediately add the extra
+record to the set! It would simply be too expensive to refresh all
+the sets every time any changes to the metadata are made.
+
+The OAI set will, however, be updated automatically by a scheduled metadata export job that
+runs every night (at 2AM, by default). This export timer is created
+and activated automatically every time the application is deployed
+or restarted. Once again, this is new in Dataverse 4; in DVN
+v3, export jobs had to be scheduled and activated by the admin
+user. See the "Metadata Export" section of this guide for more information on the automated metadata exports.
+
+It is still possible, however, to have changes like this immediately
+reflected in the OAI server, by going to the *Harvesting Server* page
+and clicking the "Run Export" icon next to the desired OAI set.
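+
+To verify what a set currently advertises, you can query your own OAI server the same way a remote harvesting client would. A minimal sketch, assuming the application is reachable at ``http://localhost:8080`` and the set is named ``my_set`` (adjust both for your installation)::
+
+  curl "http://localhost:8080/oai?verb=ListIdentifiers&metadataPrefix=oai_dc&set=my_set"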
diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst
new file mode 100755
index 00000000000..e7dfb0bf46a
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/index.rst
@@ -0,0 +1,21 @@
+.. Dataverse API Documentation master file, created by
+ sphinx-quickstart on Wed Aug 28 17:54:16 2013.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Admin Guide
+===========
+
+This guide documents functionality that is only available to Dataverse Admin ("Network Administrator") users. As of this release (4.5), such functionality includes managing Harvesting (client and server) and batch metadata export.
+
+These "superuser" tasks are managed via the new page called the Dashboard. A user logged in as a Dataverse Admin will see the Dashboard link rendered in the upper right corner of every Dataverse page.
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ harvestclients
+ harvestserver
+ metadataexport
+ timers
diff --git a/doc/sphinx-guides/source/admin/metadataexport.rst b/doc/sphinx-guides/source/admin/metadataexport.rst
new file mode 100644
index 00000000000..a8931b87716
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/metadataexport.rst
@@ -0,0 +1,30 @@
+Metadata Export
+===============
+
+.. contents:: :local:
+
+Automatic Exports
+-----------------
+
+Unlike in DVN v3, publishing a dataset in Dataverse 4 automatically starts a metadata export job that runs in the background, asynchronously. Once completed, it leaves the dataset metadata exported and cached in all the supported formats (Dublin Core, Data Documentation Initiative (DDI), and native JSON). There is no need to run the export manually.
+
+A scheduled timer job that runs nightly will attempt to export any published datasets that, for whatever reason, haven't been exported yet. This timer is activated automatically on deployment or restart of the application, so, again, there is no need to start or configure it manually. (See the "Dataverse Application Timers" section of this guide for more information.)
+
+Batch exports through the API
+-----------------------------
+
+In addition to the automated exports, a Dataverse admin can start a batch job through the API. The following 2 API calls are provided:
+
+/api/admin/metadata/exportAll
+
+/api/admin/metadata/reExportAll
+
+The former will attempt to export all the published, local (non-harvested) datasets that haven't been exported yet.
+The latter will *force* a re-export of every published, local dataset, regardless of whether it has already been exported or not.
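+
+For example (a sketch; the ``localhost`` address and port are assumptions based on a default installation, and the admin API is typically only reachable from the server itself)::
+
+  curl http://localhost:8080/api/admin/metadata/exportAll
+  curl http://localhost:8080/api/admin/metadata/reExportAll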
+
+Note that creating, modifying, or re-exporting an OAI set will also attempt to export all the unexported datasets found in the set.
+
+Export Failures
+---------------
+
+An export batch job, whether started via the API or by the application timer, will leave a detailed log in your configured logs directory. This is the same location where your main Glassfish server.log is found. The name of the log file is ``export_[timestamp].log`` - for example, *export_2016-08-23T03-35-23.log*. The log will contain the number of datasets processed successfully and the number for which metadata export failed, with some information on the failures detected. Please attach this log file if you need to contact Dataverse support about metadata export problems.
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/admin/timers.rst b/doc/sphinx-guides/source/admin/timers.rst
new file mode 100644
index 00000000000..083833f31be
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/timers.rst
@@ -0,0 +1,43 @@
+Dataverse Application Timers
+============================
+
+.. contents:: :local:
+
+Dataverse uses timers to automatically run scheduled Harvest and Metadata export jobs.
+
+
+Dedicated timer server in a Dataverse server cluster
+----------------------------------------------------
+
+When running a Dataverse cluster - i.e. multiple Dataverse application
+servers talking to the same database - **only one** of them must act
+as the *dedicated timer server*. This is to avoid starting conflicting
+batch jobs on multiple nodes at the same time.
+
+This does not affect a single-server installation. So you can safely skip this section unless you are running a multi-server cluster.
+
+The following JVM option instructs the application to act as the dedicated timer server:
+
+``-Ddataverse.timerServer=true``
+
+**IMPORTANT:** Note that this option is automatically set by the Dataverse installer script. That means that when configuring a multi-server cluster, it will be the responsibility of the person doing the installation to remove the option from the domain.xml of every node except the one intended to be the timer server.
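+
+A minimal sketch of managing this option by hand with Glassfish's ``asadmin`` tool (the ``asadmin`` location and the use of the default domain are assumptions; adjust for your installation)::
+
+  # on the one node that should act as the dedicated timer server
+  ./asadmin create-jvm-options "-Ddataverse.timerServer=true"
+
+  # on every other node in the cluster
+  ./asadmin delete-jvm-options "-Ddataverse.timerServer=true"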
+
+Harvesting Timers
+-----------------
+
+These timers are created when scheduled harvesting is enabled by a local admin user (via the "Manage Harvesting Clients" page).
+
+In a multi-node cluster, all these timers will be created on the dedicated timer node (and not necessarily on the node where the harvesting client was created and/or saved).
+
+A timer will be automatically removed when a harvesting client with an active schedule is deleted, or if the schedule is turned off for an existing client.
+
+Metadata Export Timer
+---------------------
+
+This timer is created automatically whenever the application is deployed or restarted. There is no admin user-accessible configuration for this timer.
+
+This timer runs a daily job that tries to export all the local, published datasets that haven't been exported yet, in all the supported metadata formats, and to cache the results on the filesystem. (Note that, normally, an export will happen automatically whenever a dataset is published, so this scheduled job is there to catch any datasets for which that export did not succeed for one reason or another.) Also, since this functionality was added in version 4.5, if you are upgrading from a previous version none of your datasets have been exported yet, so the first time this job runs it will attempt to export them all.
+
+This daily job will also update all the harvestable OAI sets configured on your server, adding new and/or newly published datasets or marking deaccessioned datasets as "deleted" in the corresponding sets as needed.
+
+This job is automatically scheduled to run at 2AM local time every night. If really necessary, it is possible (for an advanced user) to change that time by directly editing the EJB timer application table in the database.
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 2a92244e21f..59af4f00eee 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -110,8 +110,6 @@ Datasets
GET http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB
-
-
Show the dataset whose id is passed::
GET http://$SERVER/api/datasets/$id?key=$apiKey
@@ -127,6 +125,14 @@ List versions of the dataset::
Show a version of the dataset. The Dataset also include any metadata blocks the data might have::
GET http://$SERVER/api/datasets/$id/versions/$versionNumber?key=$apiKey
+
+
+Export the metadata of the current published version of a dataset in various formats (see the note below)::
+
+ GET http://$SERVER/api/datasets/export?exporter=ddi&persistentId=$persistentId
+
+ Note: Supported exporters (export formats) are ddi, oai_ddi, dcterms, oai_dc, and dataverse_json.
+
Lists all the file metadata, for the given dataset and version::
@@ -162,6 +168,21 @@ Restores the default logic of the field type to be used as the citation date. Sa
DELETE http://$SERVER/api/datasets/$id/citationdate?key=$apiKey
+List all the role assignments at the given dataset::
+
+ GET http://$SERVER/api/datasets/$id/assignments?key=$apiKey
+
+Create a Private URL (must be able to manage dataset permissions)::
+
+ POST http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey
+
+Get a Private URL from a dataset (if available)::
+
+ GET http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey
+
+Delete a Private URL from a dataset (if it exists)::
+
+ DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey
Builtin Users
~~~~~
@@ -244,6 +265,13 @@ Shibboleth Groups
Management of Shibboleth groups via API is documented in the :doc:`/installation/shibboleth` section of the Installation Guide.
+Info
+~~~~
+
+For now, only the value for the ``:DatasetPublishPopupCustomText`` setting from the :doc:`/installation/config` section of the Installation Guide is exposed::
+
+ GET http://$SERVER/api/info/settings/:DatasetPublishPopupCustomText
+
Metadata Blocks
~~~~~~~~~~~~~~~
diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst
index b8faa891499..dbc4283395c 100755
--- a/doc/sphinx-guides/source/api/sword.rst
+++ b/doc/sphinx-guides/source/api/sword.rst
@@ -3,11 +3,11 @@ SWORD API
SWORD_ stands for "Simple Web-service Offering Repository Deposit" and is a "profile" of AtomPub (`RFC 5023`_) which is a RESTful API that allows non-Dataverse software to deposit files and metadata into a Dataverse installation. :ref:`client-libraries` are available in Python, Java, R, Ruby, and PHP.
-Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs will continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`.
+Introduced in Dataverse Network (DVN) `3.6 `_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`.
Dataverse implements most of SWORDv2_, which is specified at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. For a quick introduction to SWORD, the two minute video at http://cottagelabs.com/news/intro-to-sword-2 is recommended.
-As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse 4.0 datasets can also be created via JSON using the "native" API.
+As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse 4.0 datasets can also be created via JSON using the "native" API. SWORD is limited to the dozen or so fields listed below in the crosswalk, but the native API allows you to populate all metadata fields available in Dataverse.
.. _SWORD: http://en.wikipedia.org/wiki/SWORD_%28protocol%29
@@ -24,9 +24,9 @@ As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse 4.0 datas
Backward incompatible changes
-----------------------------
-For better security, usernames and passwords are no longer accepted. The use of an API token is required.
+For better security than in DVN 3.x, usernames and passwords are no longer accepted. The use of an API token is required.
-In addition, differences in Dataverse 4.0 have lead to a few minor backward incompatible changes in the Dataverse implementation of SWORD, which are listed below. Old ``v1`` URLs should continue to work but the ``Service Document`` will contain a deprecation warning and responses will contain ``v1.1`` URLs. See also :ref:`known-issues`.
+Differences in Dataverse 4 from DVN 3.x led to a few minor backward incompatible changes in the Dataverse implementation of SWORD, which are listed below. Old ``v1`` URLs should continue to work but the ``Service Document`` will contain a deprecation warning and responses will contain ``v1.1`` URLs. See also :ref:`known-issues`.
- Newly required fields when creating/editing datasets for compliance with the `Joint Declaration for Data Citation principles `_.
@@ -41,11 +41,13 @@ In addition, differences in Dataverse 4.0 have lead to a few minor backward inco
New features as of v1.1
-----------------------
-- Dataverse 4.0 supports API tokens and they must be used rather that a username and password. In the ``curl`` examples below, you will see ``curl -u $API_TOKEN:`` showing that you should send your API token as the username and nothing as the password. For example, ``curl -u 54b143b5-d001-4254-afc0-a1c0f6a5b5a7:``.
+- Dataverse 4 supports API tokens and they must be used rather than a username and password. In the ``curl`` examples below, you will see ``curl -u $API_TOKEN:`` showing that you should send your API token as the username and nothing as the password. For example, ``curl -u 54b143b5-d001-4254-afc0-a1c0f6a5b5a7:``.
-- Dataverses can be published via SWORD
+- SWORD operations no longer require "admin" permission. In order to use any SWORD operation in DVN 3.x, you had to be "admin" on a dataverse (the container for your dataset) and similar rules were applied in Dataverse 4.4 and earlier (the ``EditDataverse`` permission was required). The SWORD API has now been fully integrated with the Dataverse 4 permission model such that any action you have permission to perform in the GUI or "native" API, you are able to perform via SWORD. This means that even a user with a "Contributor" role can operate on datasets via SWORD. Note that users with the "Contributor" role do not have the ``PublishDataset`` permission and will not be able to publish their datasets via any mechanism, GUI or API.
-- Datasets versions will only be increased to the next minor version (i.e. 1.1) rather than a major version (2.0) if possible. This depends on the nature of the change.
+- Dataverses can be published via SWORD.
+
+- Dataset versions will only be increased to the next minor version (i.e. 1.1) rather than a major version (2.0) if possible. This depends on the nature of the change. Adding or removing a file, for example, requires a major version bump.
- "Author Affiliation" can now be populated with an XML attribute. For example: Stumptown, Jane
@@ -67,13 +69,15 @@ curl examples
Retrieve SWORD service document
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The service document enumerates the dataverses ("collections" from a SWORD perspective) the user can deposit data into. The "collectionPolicy" element for each dataverse contains the Terms of Use.
+The service document enumerates the dataverses ("collections" from a SWORD perspective) the user can deposit data into. The "collectionPolicy" element for each dataverse contains the Terms of Use. Any user with an API token can use this API endpoint. Institution-wide Shibboleth groups are not respected because membership in such a group can only be set via a browser.
``curl -u $API_TOKEN: https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/service-document``
Create a dataset with an Atom entry
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+To create a dataset, you must have the "Dataset Creator" role (the ``AddDataset`` permission) on a dataverse. Practically speaking, you should first retrieve the service document to list the dataverses into which you are authorized to deposit data.
+
``curl -u $API_TOKEN: --data-binary "@path/to/atom-entry-study.xml" -H "Content-Type: application/atom+xml" https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/$DATAVERSE_ALIAS``
Example Atom entry (XML)
@@ -81,7 +85,7 @@ Example Atom entry (XML)
.. literalinclude:: sword-atom-entry.xml
Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse DB Element Crosswalk
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+
|DC (terms: namespace) | Dataverse DB Element | Required | Note |
+=============================+==============================================+==============+=============================================================================================================================================================+
@@ -117,16 +121,22 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse DB Element Crosswalk
List datasets in a dataverse
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+You must have permission to add datasets in a dataverse (the dataverse should appear in the service document) to list the datasets inside. Institution-wide Shibboleth groups are not respected because membership in such a group can only be set via a browser.
+
``curl -u $API_TOKEN: https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/$DATAVERSE_ALIAS``
Add files to a dataset with a zip file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+You must have ``EditDataset`` permission (Contributor role or above such as Curator or Admin) on the dataset to add files.
+
``curl -u $API_TOKEN: --data-binary @path/to/example.zip -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit-media/study/doi:TEST/12345``
Display a dataset atom entry
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+You must have ``ViewUnpublishedDataset`` permission (Contributor role or above such as Curator or Admin) on the dataset to view its Atom entry.
+
Contains data citation (bibliographicCitation), alternate URI (persistent URI of study), edit URI, edit media URI, statement URI.
``curl -u $API_TOKEN: https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit/study/doi:TEST/12345``
@@ -134,45 +144,49 @@ Contains data citation (bibliographicCitation), alternate URI (persistent URI of
Display a dataset statement
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Contains title, author, feed of file entries, latestVersionState, locked boolean, updated timestamp.
+Contains title, author, feed of file entries, latestVersionState, locked boolean, updated timestamp. You must have ``ViewUnpublishedDataset`` permission (Contributor role or above such as Curator or Admin) on the dataset to display the statement.
``curl -u $API_TOKEN: https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/statement/study/doi:TEST/12345``
Delete a file by database id
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+You must have ``EditDataset`` permission (Contributor role or above such as Curator or Admin) on the dataset to delete files.
+
``curl -u $API_TOKEN: -X DELETE https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/123``
Replacing metadata for a dataset
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Please note that **ALL** metadata (title, author, etc.) will be replaced, including fields that can not be expressed with "dcterms" fields.
+Please note that **ALL** metadata (title, author, etc.) will be replaced, including fields that can not be expressed with "dcterms" fields. You must have ``EditDataset`` permission (Contributor role or above such as Curator or Admin) on the dataset to replace metadata.
``curl -u $API_TOKEN: --upload-file "path/to/atom-entry-study2.xml" -H "Content-Type: application/atom+xml" https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit/study/doi:TEST/12345``
Delete a dataset
~~~~~~~~~~~~~~~~
+You must have the ``DeleteDatasetDraft`` permission (Contributor role or above such as Curator or Admin) on the dataset to delete it. Please note that if the dataset has never been published you will be able to delete it completely but if the dataset has already been published you will only be able to delete post-publication drafts, never a published version.
+
``curl -u $API_TOKEN: -i -X DELETE https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit/study/doi:TEST/12345``
Determine if a dataverse has been published
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Look for a `dataverseHasBeenReleased` boolean.
+This API endpoint is the same as the "list datasets in a dataverse" endpoint documented above and the same permissions apply but it is documented here separately to point out that you can look for a boolean called ``dataverseHasBeenReleased`` to know if a dataverse has been released, which is required for publishing a dataset.
``curl -u $API_TOKEN: https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/collection/dataverse/$DATAVERSE_ALIAS``
Publish a dataverse
~~~~~~~~~~~~~~~~~~~
-The ``cat /dev/null`` and ``--data-binary @-`` arguments are used to send zero-length content to the API, which is required by the upstream library to process the ``In-Progress: false`` header.
+The ``cat /dev/null`` and ``--data-binary @-`` arguments are used to send zero-length content to the API, which is required by the upstream library to process the ``In-Progress: false`` header. You must have the ``PublishDataverse`` permission (Admin role) on the dataverse to publish it.
``cat /dev/null | curl -u $API_TOKEN: -X POST -H "In-Progress: false" --data-binary @- https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit/dataverse/$DATAVERSE_ALIAS``
Publish a dataset
~~~~~~~~~~~~~~~~~
-The ``cat /dev/null`` and ``--data-binary @-`` arguments are used to send zero-length content to the API, which is required by the upstream library to process the ``In-Progress: false`` header.
+The ``cat /dev/null`` and ``--data-binary @-`` arguments are used to send zero-length content to the API, which is required by the upstream library to process the ``In-Progress: false`` header. You must have the ``PublishDataset`` permission (Curator or Admin role) on the dataset to publish it.
``cat /dev/null | curl -u $API_TOKEN: -X POST -H "In-Progress: false" --data-binary @- https://$HOSTNAME/dvn/api/data-deposit/v1.1/swordv2/edit/study/doi:TEST/12345``
@@ -181,22 +195,15 @@ The ``cat /dev/null`` and ``--data-binary @-`` arguments are used to send zero-l
Known issues
------------
-- Potential mismatch between the dataverses ("collections" from a SWORD perspective) the user can deposit data into in returned by the Service Document and which dataverses the user can actually deposit data into. This is due to an incomplete transition from the old DVN 3.x "admin-only" style permission checking to the new permissions system in Dataverse 4.0 ( https://github.com/IQSS/dataverse/issues/1070 ). The mismatch was reported at https://github.com/IQSS/dataverse/issues/1443
-
-- Should see all the fields filled in for a dataset regardless of what the parent dataverse specifies: https://github.com/IQSS/dataverse/issues/756
-
-- Inefficiency in constructing the ``Service Document``: https://github.com/IQSS/dataverse/issues/784
-
-- Inefficiency in constructing the list of datasets: https://github.com/IQSS/dataverse/issues/784
+- Deleting a file from a published version (not a draft) creates a draft but doesn't delete the file: https://github.com/IQSS/dataverse/issues/2464
-Roadmap
--------
+- The Service Document does not honor groups within groups: https://github.com/IQSS/dataverse/issues/3056
-These are features we'd like to add in the future:
+- Should see all the fields filled in for a dataset regardless of what the parent dataverse specifies: https://github.com/IQSS/dataverse/issues/756
-- Implement SWORD 2.0 Profile 6.4: https://github.com/IQSS/dataverse/issues/183
+- SWORD 2.0 Profile 6.4 "Retrieving the content" has not been implemented: https://github.com/IQSS/dataverse/issues/183
-- Support deaccessioning via API: https://github.com/IQSS/dataverse/issues/778
+- Deaccessioning via API is not supported (it was in DVN 3.x): https://github.com/IQSS/dataverse/issues/778
- Let file metadata (i.e. description) be specified during zip upload: https://github.com/IQSS/dataverse/issues/723
diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py
index 4b99ee735e7..e9c09e0c61b 100755
--- a/doc/sphinx-guides/source/conf.py
+++ b/doc/sphinx-guides/source/conf.py
@@ -63,9 +63,9 @@
# built documents.
#
# The short X.Y version.
-version = '4.4'
+version = '4.5'
# The full version, including alpha/beta/rc tags.
-release = '4.4'
+release = '4.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst
index 414bcddd298..1c3a88dcdef 100755
--- a/doc/sphinx-guides/source/index.rst
+++ b/doc/sphinx-guides/source/index.rst
@@ -3,7 +3,7 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Dataverse 4.4 Guides
+Dataverse 4.5 Guides
======================
These guides are for the most recent version of Dataverse. For the guides for **version 4.3.1** please go `here `_.
@@ -17,6 +17,7 @@ These guides are for the most recent version of Dataverse. For the guides for **
installation/index
api/index
developers/index
+ admin/index
How the Guides Are Organized
=============================
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 10a9563e66a..142e69ff3e3 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -422,7 +422,7 @@ For Development only. Set ``GeoconnectDebug`` to true to allow a user to see SQ
:DatasetPublishPopupCustomText
++++++++++++++++++++++++++++++
-Set custom text a user will view when publishing a dataset.
+Set custom text a user will view when publishing a dataset. Note that this text is exposed via the "Info" endpoint of the :doc:`/api/native-api`.
``curl -X PUT -d "Deposit License Requirements" http://localhost:8080/api/admin/settings/:DatasetPublishPopupCustomText``
diff --git a/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst b/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst
index 6cbf69fa2e5..44cd29570b9 100644
--- a/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst
+++ b/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst
@@ -53,7 +53,9 @@ install rApache as follows::
yum install libapreq2
rpm -ivh http://mirror.hmdc.harvard.edu/HMDC-Public/RedHat-6/rapache-1.2.6-rpm0.x86_64.rpm
+If you are using RHEL/CentOS 7, you can `download an experimental rapache-1.2.7-rpm0.x86_64.rpm <../_static/installation/files/home/rpmbuild/rpmbuild/RPMS/x86_64/rapache-1.2.7-rpm0.x86_64.rpm>`_ and install it with::
+
+   rpm -ivh rapache-1.2.7-rpm0.x86_64.rpm
d. Install libcurl-devel:
-------------------------
diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst
index b8c2c09ef03..7a93eb1eb52 100644
--- a/doc/sphinx-guides/source/installation/shibboleth.rst
+++ b/doc/sphinx-guides/source/installation/shibboleth.rst
@@ -3,11 +3,6 @@ Shibboleth
.. contents:: :local:
-Status: Experimental
---------------------
-
-Shibboleth support in Dataverse should be considered **experimental** until https://github.com/IQSS/dataverse/issues/2117 is closed (indicating that the feature has been in used in production at https://dataverse.harvard.edu for a while), but the `Dataverse development team `_ is eager to receive feedback on the Shibboleth feature (including these docs!) via any channel listed in the :doc:`intro` section.
-
Introduction
------------
@@ -178,11 +173,13 @@ Most Dataverse installations will probably only want to authenticate users via S
Identity Federation
+++++++++++++++++++
-Rather than specifying individual Identity Provider(s) you may wish to broaden the number of users who can log into your Dataverse installation by registering your Dataverse installation as a Service Provider (SP) within an identity federation. For example, in the United States, users from `hundreds of institutions registered with the "InCommon" identity federation `_ will be able to log into your Dataverse installation if you register it as one of the `thousands of Service Providers registered with InCommon `_.
+Rather than or in addition to specifying individual Identity Provider(s) you may wish to broaden the number of users who can log into your Dataverse installation by registering your Dataverse installation as a Service Provider (SP) within an identity federation. For example, in the United States, users from the `many institutions registered with the "InCommon" identity federation `_ that release the `"Research & Scholarship Attribute Bundle" `_ will be able to log into your Dataverse installation if you register it as an `InCommon Service Provider `_ that is part of the `Research & Scholarship (R&S) category `_.
The details of how to register with an identity federation are out of scope for this document, but a good starting point may be this list of identity federations across the world: http://www.protectnetwork.org/support/faq/identity-federations
-One of the benefits of using ``shibd`` is that it can be configured to periodically poll your identify federation for updates as new Identity Providers (IdPs) join the federation you've registered with. For the InCommon federation, the following page describes how to download and verify signed InCommon metadata every hour: https://spaces.internet2.edu/display/InCFederation/Shibboleth+Metadata+Config#ShibbolethMetadataConfig-ConfiguretheShibbolethSP
+One of the benefits of using ``shibd`` is that it can be configured to periodically poll your identity federation for updates as new Identity Providers (IdPs) join the federation you've registered with. For the InCommon federation, the following page describes how to download and verify signed InCommon metadata every hour: https://spaces.internet2.edu/display/InCFederation/Shibboleth+Metadata+Config#ShibbolethMetadataConfig-ConfiguretheShibbolethSP . You can also see an example of this as ``maxRefreshDelay="3600"`` in the commented out section of the ``shibboleth2.xml`` file above.
+
+Once you've joined a federation, the list of IdPs in the dropdown can be quite long! If you're curious how many are in the list, you could try something like this: ``curl https://dataverse.example.edu/Shibboleth.sso/DiscoFeed | jq '.[].entityID' | wc -l``
.. _shibboleth-attributes:
@@ -304,12 +301,14 @@ To create an institution-wide Shibboleth groups, create a JSON file as below and
.. literalinclude:: ../_static/installation/files/etc/shibboleth/shibGroupTestShib.json
-Note that institution-wide Shibboleth groups are based on the "Shib-Identity-Provider" attribute but https://github.com/IQSS/dataverse/issues/1515 tracks adding support for arbitrary attributes such as ""eduPersonScopedAffiliation", etc.
+Institution-wide Shibboleth groups are based on the "Shib-Identity-Provider" SAML attribute asserted at runtime after successful authentication with the Identity Provider (IdP) and held within the browser session rather than being persisted in the database for any length of time. It is for this reason that roles based on these groups, such as the ability to create a dataset, are not honored by non-browser interactions, such as through the SWORD API.
To list institution-wide Shibboleth groups: ``curl http://localhost:8080/api/admin/groups/shib``
To delete an institution-wide Shibboleth group (assuming id 1): ``curl -X DELETE http://localhost:8080/api/admin/groups/shib/1``
+Support for arbitrary attributes beyond "Shib-Identity-Provider" such as "eduPersonScopedAffiliation", etc. is being tracked at https://github.com/IQSS/dataverse/issues/1515
+
Converting Local Users to Shibboleth
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index 13bdf5928b3..0a6200583e5 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -16,6 +16,8 @@ A dataset contains three levels of metadata:
For more details about what Citation and Domain specific metadata is supported please see our `Appendix <../user/appendix.html#metadata-references>`_.
+Note that once a dataset has been published, its metadata may be exported. A button on the dataset page's metadata tab will allow a user to export the metadata of the most recently published version of the dataset. Currently supported export formats are DDI, Dublin Core and JSON.
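+
+The same cached metadata can also be retrieved programmatically via the export API described in the API Guide. A sketch, where the hostname and DOI are placeholders::
+
+  curl "https://dataverse.example.edu/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB"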
+
File Handling + Uploading
===============================
@@ -270,6 +272,27 @@ dataset before they decide to either "Publish" the dataset or "Return to Author"
will be notified that it is now published. If the dataset is returned to the author, the contributor of this dataset will be
notified that they need to make modifications before it can be submitted for review again.
+.. _privateurl:
+
+Private URL for Reviewing an Unpublished Dataset
+==================================================
+
+To share an unpublished dataset using Private URL
+----------------------------------------------------------------------
+
+Creating a Private URL for your dataset allows you to share your dataset (for viewing and downloading of files), before it is published, with a wide group of individuals who may not have a user account on Dataverse. Anyone you send the Private URL to will not have to log into Dataverse to view the dataset.
+
+ 1. Go to your unpublished dataset
+ 2. Select the “Edit” button
+ 3. Select “Private URL” in the dropdown menu
+ 4. In the pop-up select “Create Private URL”
 5. Copy the Private URL which has been created for this dataset; it can now be shared with anyone you wish to give access to view or download files in your unpublished dataset.
+
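+A Private URL can also be created, retrieved, or deleted through the API (see the Native API section of the API Guide). A sketch, assuming ``$SERVER``, ``$id`` and ``$apiKey`` are filled in for your installation::
+
+  curl -X POST "http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey"
+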
+To disable a Private URL
+-------------------------------------
+
+If you have shared a Private URL to your dataset and wish to revoke access, follow the same steps as above through step #3, but in the pop-up select “Disable Private URL”.
+
Dataset Versioning
======================
diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst
index f8ef39b6c3f..5395593a2ad 100755
--- a/doc/sphinx-guides/source/user/find-use-data.rst
+++ b/doc/sphinx-guides/source/user/find-use-data.rst
@@ -72,7 +72,7 @@ You can find the citation for the dataset at the top of the dataset page in a bl
Download Files
-----------------
-Within the Files tab on a dataset page, a user can either Explore tabular data files using TwoRavens, Download All File Formats + Information or individually download the Original File Format, Tab Delimited Format, Variable Metadata, Data File Citation (RIS Format or EndNote XML), or Subset (options appear depending on file format).
+Within the Files tab on a dataset page, a user can either Explore tabular data files using TwoRavens, Download All File Formats + Information or individually download the Original File Format, Tab Delimited Format, Variable Metadata, Data File Citation (EndNote XML, RIS Format, or BibTeX Format), or Subset (options appear depending on file format).
To download more than one file at a time, select the files you would like to download and then click the Download button above the files. The selected files will download in zip format.
diff --git a/doc/sphinx_bootstrap_theme/bootstrap/layout.html b/doc/sphinx_bootstrap_theme/bootstrap/layout.html
index 7deba6ab74f..3478e807b30 100755
--- a/doc/sphinx_bootstrap_theme/bootstrap/layout.html
+++ b/doc/sphinx_bootstrap_theme/bootstrap/layout.html
@@ -133,7 +133,7 @@
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
{%- else %}
- {% trans copyright=copyright|e %}Data Science at The Institute for Quantitative Social Science | Code available at | Created using Sphinx {{ sphinx_version }} Version {{ version }} | Last updated on {{ last_updated }} © Copyright {{ copyright }} {% endtrans %}
+ {% trans copyright=copyright|e %}Data Science at The Institute for Quantitative Social Science | Code available at | Created using Sphinx {{ sphinx_version }} Version {{ version }} | Last updated on {{ last_updated }} © Copyright {{ copyright }} {% endtrans %}
{%- endif %}
{%- endif %}
diff --git a/doc/theTestableCommand/TheTestableCommand-outline.md b/doc/theTestableCommand/TheTestableCommand-outline.md
new file mode 100644
index 00000000000..84e3d03b9fc
--- /dev/null
+++ b/doc/theTestableCommand/TheTestableCommand-outline.md
@@ -0,0 +1,47 @@
+# The Testable Command
+
+_outline_
+* Intro
+ *√ Application Complexity
+ *√ Definitions of unit tests
+ *√ Positive def.
+ *√ Quick runs
+ *√ validate small portions of limited complexity
+ *√ Use the code in another context (aids portability, reuse, and, thus, overall quality)
+ *√ To some extent, can be read as a use guide and a specification
+ *√ No dependency on other processes
+ *√ No dependency on external files
+ *√ No dependency on hard-coded data that needs to be manually changed
+ * running under JUnit not enough
+ * Make sure to test the right thing, and to test the thing right (e.g. no `toString` equality, unless you're testing some logic that generates Strings).
+ *√ Why no embedded test servers
+ *√ Too many moving parts
+ *√ Part code, part spec, part magic (e.g. putting stuff in `private` fields!)
+ * Mention the Weld bug
+
+* Commands in Dataverse
+ *√ Command pattern
+ *√ Refer to the "Lean Beans are Made of This" presentation
+ * Making a command testable - what to do in the service bean and what should be done in a command
+ * √Command should not deal directly with anything that's not a service bean
+ or a domain model object - including the entity manager, API calls to Solr, file system calls,
+ HTTPRequest, JSFContext, etc.
+ * √ This roughly amounts to - Storage and retrieval "primitives" (of models) go in the bean, actions go on the commands.
+ * True, I've added the `em()` method to the `CommandContext` class. That was
+ while exploring the idea of removing the beans altogether. It works, but
+ its not testable. So it will be deprecated at some point.
+ *√ Any context object (JSFContext, HTTPRequest) should not be used by the command. Extract exactly what the command needs, and pass it as a parameter to the command's constructor.
+ * x e.g. `DataverseRequest` had a constructor that got a `HTTPRequest` as a parameter. Internally, that constructor extracted the source IP address and stored it in a field. To allow testing, a new constructor, one that gets only the IPAddress, was added.
+
+* Testing the command
+ * Setting up the domain context in on which the command acts
+ * Dataverses, Datasets....
+ * Use `MocksFactory` (lives in the test folder, not in src) to create sensible default objects.
+ * Hand-craft the instances needed for the test, to make sure the test case really tests what it needs to test.
+ * Create a `TestCommandContext` subclass, and override the methods providing the required service beans. The service beans might need to be subclassed as well, typically replacing database calls with actions on in-memory data structures.
+ * Create a `TestDataverseEngine` instance, and pass it an instance of the `TestCommandContext` subclass.
+ * Submit the command
+ * `Assert` ad nauseum.
+ * Command results
+ * Calls within the beans (e.g. validating that a method did get called, or not called more than once)
+ * Permissions required by the command
diff --git a/doc/theTestableCommand/TheTestableCommand.md b/doc/theTestableCommand/TheTestableCommand.md
new file mode 100644
index 00000000000..e60faa313f0
--- /dev/null
+++ b/doc/theTestableCommand/TheTestableCommand.md
@@ -0,0 +1,122 @@
+# The Testable Command
+
+> This document was started as a result of [Issue #2746 - Improve automated testing](https://github.com/IQSS/dataverse/issues/2746),
+> started by @pdurbin.
+
+* _2016-01-07_ `v3` More tips.
+* _2016-01-07_ `v2` Added references to CI and code coverage. Limited scope to `DvObject`s.
+* _2016-01-03_ `v1` Initial Version
+
+_Michael Bar-Sinai_
+
+Dataverse is a rather complex system, implementing a rather complex set of requirements. There are many moving parts within the application itself, and many moving parts in its infrastructure (Glassfish, Solr, etc.). Thus, it's hard to detect erroneous behaviors, let alone find a way to reproduce them and spot the point in the code where the failure happens. Moreover, testing a web application requires setup and scripting of its UI or its API - which makes the tests hard to write, laborious to set up, and to make matters worse, brittle. That's not to say that these tests are not important; they are. But it is unrealistic for developers to create, maintain and run these tests very often.
+
+On the other hand, developers can create, maintain and frequently run unit tests.
+
+## Unit Tests
+
+The term "unit test" has been reused, confused and abused since it became popular, so let's start with a definition of what it means in the scope of this document. A unit test is a short piece of code that tests a small and distinct part of a system - the "unit". Executing a unit test does not take long (typically less than a second) and requires no configuration. This implies that during a unit test all activities are limited to the application memory - no reading files, no going to the network, and no querying another process in the same machine.
+
+While they can't replace end-to-end tests, unit tests are a great way to validate small portions of a system and protect against regressions. But having unit tests improves application code in other ways as well. First off, to have unit tests one needs to have units. That is, the code has to be designed in modules with clear boundaries that can be reused in at least two contexts - run and test. This aids code comprehension and reuse. Unit tests also serve as an example of how the tested units are used by client code, and provide some examples of inputs and outputs. Sort of a poor man's specification document, if you will. Additionally, when writing tests the developer uses a reverse mindset, trying to break her code rather than make it work. This process makes the code much more resilient.
+
+Because unit tests are easy to create (Java only, no configuration needed) and quick to run, it is possible to write many of them, such that many aspects of the code are tested. Normally, a single unit test would test a single use case of the unit. This way, when a unit test fails, the failure describes exactly what part stopped functioning. Other unit tests are not blocked by the failure, and so by running the entire test suite, the developer can get a good overview of which parts are broken and which parts are functioning well.
+
+Because unit tests are easy to execute, it is recommended to get in the habit of running them prior to committing code changes to the repository. These tests are also integrated into Dataverse's automatic build processes (on [Travis-ci](https://travis-ci.org/IQSS/dataverse)). A failed test halts the build. Dataverse's build process also collects data about code coverage during the unit tests, using [Coveralls](https://coveralls.io/github/IQSS/dataverse). While code coverage is a problematic measure for Java EE applications (and has some inherent problems as well), generally speaking larger coverage means better testing.
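+
+A minimal sketch of running the suite locally, assuming a standard Maven setup in the project root:
+
+    mvn test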
+
+Unit testing of application logic in Java EE applications is normally hard to do, as the application logic lives in the service beans, which rely on dependency injection. Writing unit tests for service beans is possible, but as it involves a test container and a persistence context (read: an in-memory database), such tests are not very unit-y.
+
+Luckily for Dataverse, most of the application logic regarding `DvObject`s lives in sub-classes of `Command`. As these classes are plain old Java classes that get their service beans through another plain old Java class, `CommandContext`, unit testing them is pretty straightforward. That is, if we write them to be testable.
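+
+The sketch below shows the general shape of such a command. The class, entity and bean accessor are made up for illustration - this is not actual Dataverse code - but the pattern is the real one: the command is a plain Java object, and the only way it reaches service beans is through the `CommandContext` it receives, which is exactly the object a unit test can swap out.
+
+````Java
+// Illustrative sketch - names simplified, not actual Dataverse classes.
+public class RenameWidgetCommand extends AbstractCommand<Widget> {
+
+    private final Widget widget;
+    private final String newName;
+
+    public RenameWidgetCommand(DataverseRequest aRequest, Widget aWidget, String aNewName) {
+        super(aRequest, aWidget);
+        widget = aWidget;
+        newName = aNewName;
+    }
+
+    @Override
+    public Widget execute(CommandContext ctxt) throws CommandException {
+        if (newName == null || newName.trim().isEmpty()) {
+            throw new IllegalCommandException("A widget must have a non-empty name.", this);
+        }
+        widget.setName(newName.trim());
+        // The service bean comes from the context; nothing is injected into the command itself.
+        return ctxt.widgets().save(widget);
+    }
+}
+````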
+
+## Writing Testable Commands
+
+Ideally, commands should only handle domain model objects, such as `DvObject`s. In particular, they should not talk directly to any of the persistence systems (JPA etc.) or rely on objects from the presentation layer, such as `HTTPRequest` or `FacesContext`.
+
+Dataverse has both service beans and commands. When deciding whether an action on the models should go in a bean or in a command, remember that beans are not unit-testable and commands can't talk to JPA directly. This normally boils down to keeping straightforward storage and retrieval logic in the beans, and the possibly complex application logic in the commands. We call this the "lean bean" pattern (more about it [in this Java One presentation](http://iqss.github.io/javaone2014-bof5619/)). An application with testable, well-behaved commands will look like the image in figure 1:
+
+
+
+
+Fig. 1: Dataverse application layers inside a Java EE container, when commands are testable.
+
+
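+To make the "lean bean" half of the split concrete, here is a sketch of such a bean (again, the entity and method names are made up for the example). The bean is little more than a thin wrapper around the persistence layer; the decision making stays in the commands, as in the sketch above.
+
+````Java
+import javax.ejb.Stateless;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+
+// Illustrative "lean" bean: straightforward storage and retrieval only, no business decisions.
+@Stateless
+public class WidgetServiceBean {
+
+    @PersistenceContext
+    private EntityManager em;
+
+    public Widget save(Widget widget) {
+        return em.merge(widget);
+    }
+
+    public Widget find(Long id) {
+        return em.find(Widget.class, id);
+    }
+}
+````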
+
+When testing, the production environment the commands live in can easily be replaced by mock objects, as shown in figure 2. The presentation and storage layers are not present. The mock objects implement the same interfaces as the runtime objects, and thus the command runs during testing exactly as it does in production ([enter your VW joke here](http://www.slate.com/articles/technology/future_tense/2015/09/volkswagen_s_cheating_emissions_software_and_the_threat_of_black_boxes.html)).
+
+
+
+
+Fig. 2: Dataverse application layers during unit testing, when commands are testable.
+
+
+When a command directly accesses a storage-level module (as in figure 3), mocking its environment becomes much harder. While technically possible, creating a mock file system, database, or remote server is a lot of work, and defeats the purpose of having many small tests.
+
+
+
+
+
+Fig. 3: A Command that directly accesses storage-level modules is much harder to unit-test.
+
+
+> In the early days of commands in Dataverse, I added the `em()` method to the `CommandContext` interface, which allows commands to directly access a JPA entity manager. The idea was to try to remove all service beans and replace them with commands. That worked, but it made the commands too detailed, and non-testable. So, in hindsight, not the best move.
+> If all goes well, `em()` will be removed after we migrate all commands that use it to service bean methods instead.
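+>
+> To illustrate the migration (the query and the bean method below are made up, not actual Dataverse code), the change moves the JPA call out of the command and into a small "lean bean" method that a test can override in memory:
+>
+> ````Java
+> // Before: the command talks JPA directly through ctxt.em() - hard to mock.
+> List<Dataset> owned = ctxt.em()
+>         .createQuery("SELECT d FROM Dataset d WHERE d.owner.id = :ownerId", Dataset.class)
+>         .setParameter("ownerId", ownerId)
+>         .getResultList();
+>
+> // After: the same lookup via a hypothetical bean method, overridable in tests.
+> List<Dataset> owned = ctxt.datasets().findByOwnerId(ownerId);
+> ````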
+
+## Testing Commands
+
+Writing a unit test for a (testable) command is not very different from writing unit tests for other classes. There is some utility code that can be reused for mocking the environment, and a `TestDataverseEngine` class that executes the command in a test context.
+
+A unit test for a command might follow the pattern below (a complete sketch appears after the list):
+
+1. Set up the domain objects on which the command will work.
+ * Use `MocksFactory` (lives in the test folder, not in src) to create sensible default objects for the business logic context (e.g. metadata blocks and dataverses when testing a `DatasetVersion` object).
+ * Hand-craft the instances needed for the test, to make sure the test case really tests what it needs to test
+1. Create a `TestCommandContext` subclass, and override the methods providing the required service beans. The service beans might need to be subclassed as well, typically replacing database calls with actions on in-memory data structures.
+ * Often, the context can be reused across tests.
+ ````Java
+ class MockDatasetServiceBean extends DatasetServiceBean {
+ @Override
+ public DatasetVersion storeVersion(DatasetVersion dsv) {
+ dsv.setId( nextId() );
+ return dsv;
+ }
+ }
+ final DatasetServiceBean serviceBean = new MockDatasetServiceBean();
+ CommandContext ctxt = new TestCommandContext(){
+ @Override public DatasetServiceBean datasets() { return serviceBean; }
+ };
+ ````
+1. Create a new `TestDataverseEngine` instance, with the context as a parameter.
+ ````Java
+ TestDataverseEngine testEngine = new TestDataverseEngine( ctxt );
+ ````
+1. Submit the command to the engine
+ ````Java
+ Dataverse result = testEngine.submit(sut);
+ ````
+1. `Assert` all that needs assertion.
+ * It is also possible to assert the permissions required by the command using `TestDataverseEngine#getReqiredPermissionsForObjects`
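+
+Putting the steps together, a complete test might look roughly like the sketch below. `TestCommandContext`, `TestDataverseEngine` and the `MockDatasetServiceBean` are the pieces built in the steps above; the `StoreDatasetVersionCommand` and the `makeRequest()` helper are made up for the example.
+
+````Java
+import static org.junit.Assert.assertNotNull;
+
+import org.junit.Test;
+
+public class StoreDatasetVersionCommandTest {
+
+    @Test
+    public void testStoreAssignsId() throws Exception {
+        // 1. Domain objects - hand-crafted here; MocksFactory can supply the surrounding context.
+        DatasetVersion dsv = new DatasetVersion();
+
+        // 2. Mock context, reusing the MockDatasetServiceBean from the example above.
+        final DatasetServiceBean serviceBean = new MockDatasetServiceBean();
+        CommandContext ctxt = new TestCommandContext() {
+            @Override public DatasetServiceBean datasets() { return serviceBean; }
+        };
+
+        // 3. Engine, wrapping the mock context.
+        TestDataverseEngine testEngine = new TestDataverseEngine(ctxt);
+
+        // 4. Submit the command under test (a hypothetical command, for illustration).
+        DatasetVersion result = testEngine.submit(new StoreDatasetVersionCommand(makeRequest(), dsv));
+
+        // 5. Assert.
+        assertNotNull(result.getId());
+    }
+}
+````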
+
+## Tips for Unit Testing
+
+Numerous blogs, books and tweets have been written about creating good unit tests. Here are some non-exhaustive tips that might be especially relevant in the context of Dataverse and its commands.
+
+* Commands that might throw an `IllegalCommandException` should get a unit test validating that they indeed throw it. Use the `expected` parameter of the `@Test` annotation, like so:
+ ````Java
+ @Test(expected=IllegalCommandException.class)
+ public void testSomethingThatShouldNotBeDoneCantBeDone() throws Exception ...
+ ````
+* The old adage about "testing the right thing, and testing the thing right" holds, and it is good to keep in mind when asserting equality of complex objects. One common pitfall when testing such objects is to call a naïve `toString` on both the actual and the expected object, and then compare the strings for equality. Sadly, this creates many spurious test failures. The following two XML snippets (illustrative) are semantically equal, but a string-level comparison would fail:
+
+ ````XML
+ <!-- two illustrative snippets: same element, attributes reordered and whitespace changed -->
+ <dataset id="42" name="My Data"/>
+ <dataset name="My Data"  id="42" />
+ ````
+
+* The notion of *now* is an issue. Assume a test needs to validate that the `creationTime` field of some `DvObject` is set to the time it is created. The naïve approach would be to store the time just before executing the `Create` command, and then test that the stored time equals the value of `creationTime`. This approach will fail, seemingly at random, whenever the command happens to execute on a different millisecond. The solution is to test for a reasonable delta:
+
+ ````Java
+ assertTrue( Math.abs(System.currentTimeMillis()
+ - result.getCreateDate().toInstant().toEpochMilli()) < 1000 );
+ ````
+
+* Unit tests for Dataverse Commands live [here](/src/test/java/edu/harvard/iq/dataverse/engine/command/impl).
+
+Happy Testing!
+
+-- Michael
diff --git a/doc/theTestableCommand/diagrams.graffle b/doc/theTestableCommand/diagrams.graffle
new file mode 100644
index 00000000000..0bb8ea39d26
--- /dev/null
+++ b/doc/theTestableCommand/diagrams.graffle
@@ -0,0 +1,3878 @@
+
+
+
+
+ ApplicationVersion
+
+ com.omnigroup.OmniGrafflePro
+ 139.18.0.187838
+
+ CreationDate
+ 2016-01-02 21:02:20 +0000
+ Creator
+ Michael Bar-Sinai
+ GraphDocumentVersion
+ 8
+ GuidesLocked
+ NO
+ GuidesVisible
+ YES
+ ImageCounter
+ 1
+ LinksVisible
+ NO
+ MagnetsVisible
+ NO
+ MasterSheets
+
+ ModificationDate
+ 2016-01-03 23:15:46 +0000
+ Modifier
+ Michael Bar-Sinai
+ NotesVisible
+ NO
+ OriginVisible
+ NO
+ PageBreaks
+ YES
+ PrintInfo
+
+ NSBottomMargin
+
+ float
+ 41
+
+ NSHorizonalPagination
+
+ coded
+ BAtzdHJlYW10eXBlZIHoA4QBQISEhAhOU051bWJlcgCEhAdOU1ZhbHVlAISECE5TT2JqZWN0AIWEASqEhAFxlwCG
+
+ NSLeftMargin
+
+ float
+ 18
+
+ NSPaperSize
+
+ size
+ {595, 842}
+
+ NSPrintReverseOrientation
+
+ int
+ 0
+
+ NSRightMargin
+
+ float
+ 18
+
+ NSTopMargin
+
+ float
+ 18
+
+
+ ReadOnly
+ NO
+ Sheets
+
+
+ ActiveLayerIndex
+ 1
+ AutoAdjust
+
+ BackgroundGraphic
+
+ Bounds
+ {{0, 0}, {783, 559}}
+ Class
+ SolidGraphic
+ ID
+ 2
+ Style
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+ BaseZoom
+ 0
+ CanvasOrigin
+ {0, 0}
+ ColumnAlign
+ 1
+ ColumnSpacing
+ 36
+ DisplayScale
+ 1.000 cm = 1.000 cm
+ GraphicsList
+
+
+ Bounds
+ {{249.5, 218}, {95.645057678222656, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-SemiboldCond
+ Size
+ 12
+
+ ID
+ 19
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.289689
+ g
+ 0.897811
+ r
+ 0.315035
+
+
+ stroke
+
+ CornerRadius
+ 14
+ GapRatio
+ 0.5
+ Width
+ 4
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-SemiboldCond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 EjbDataverseEngine}
+
+
+
+ Bounds
+ {{249.49999863559134, 353.99999999999994}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 9
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.181635
+ g
+ 0.657142
+ r
+ 0.214557
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 File System}
+
+
+
+ Bounds
+ {{347.49999332427979, 354.00001474307919}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 8
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.181635
+ g
+ 0.657142
+ r
+ 0.214557
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 Solr}
+
+
+
+ Bounds
+ {{445.49998713703945, 354.00001474307908}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 7
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.181635
+ g
+ 0.657142
+ r
+ 0.214557
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 JPA/SQL}
+
+
+
+ Bounds
+ {{249.5, 286}, {284, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 6
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.158507
+ g
+ 0.776571
+ r
+ 0.218574
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 "Lean" Service Beans}
+
+
+
+ Bounds
+ {{352.01528930664062, 218}, {181.48471069335938, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 5
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.289689
+ g
+ 0.897811
+ r
+ 0.315035
+
+
+ stroke
+
+ CornerRadius
+ 14
+ GapRatio
+ 0.5
+ Width
+ 4
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 Commands}
+
+
+
+ Bounds
+ {{423.5, 150}, {110, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 4
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.4
+ g
+ 1
+ r
+ 0.4
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 API}
+
+
+
+ Bounds
+ {{249.5, 150}, {164, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 3
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.4
+ g
+ 1
+ r
+ 0.4
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 UI}
+
+
+
+ Bounds
+ {{143.75000154712973, 405.95993521291189}, {65.5, 25}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Color
+
+ b
+ 0
+ g
+ 0
+ r
+ 0
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 22
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;\red0\green0\blue0;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf2 Storage}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163.23664118803072, 385.70992356409215}, {394.5, 65}}
+ Class
+ ShapedGraphic
+ ID
+ 21
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.509983
+ g
+ 0.311949
+ r
+ 0.25906
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ Bounds
+ {{145, 116.5}, {75, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 17
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Presentation\
+(html, Json, etc.)}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 98.5}, {394.5, 75}}
+ Class
+ ShapedGraphic
+ ID
+ 16
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 0
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ Bounds
+ {{149.75, 329.75}, {65.5, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 15
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Storage\
+Protocols}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 317}, {394.5, 65}}
+ Class
+ ShapedGraphic
+ ID
+ 14
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.655716
+ g
+ 0.409186
+ r
+ 0.329095
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ Bounds
+ {{113.75, 225.75}, {137.5, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 13
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Models\
+(DvObjects et al.)}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 177}, {394.5, 136.5}}
+ Class
+ ShapedGraphic
+ ID
+ 12
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 1
+ g
+ 0.8
+ r
+ 0.4
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ GridInfo
+
+ HPages
+ 1
+ KeepToScale
+
+ Layers
+
+
+ Lock
+ NO
+ Name
+ Layers
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Ranges
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Layer 2
+ Print
+ YES
+ View
+ YES
+
+
+ LayoutInfo
+
+ Animate
+ NO
+ circoMinDist
+ 18
+ circoSeparation
+ 0.0
+ layoutEngine
+ dot
+ neatoSeparation
+ 0.0
+ twopiSeparation
+ 0.0
+
+ Orientation
+ 1
+ PrintOnePage
+
+ RowAlign
+ 1
+ RowSpacing
+ 36
+ SheetTitle
+ Testable in Container
+ UniqueID
+ 1
+ VPages
+ 1
+
+
+ ActiveLayerIndex
+ 1
+ AutoAdjust
+
+ BackgroundGraphic
+
+ Bounds
+ {{0, 0}, {783, 559}}
+ Class
+ SolidGraphic
+ ID
+ 2
+ Style
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+ BaseZoom
+ 0
+ CanvasOrigin
+ {0, 0}
+ ColumnAlign
+ 1
+ ColumnSpacing
+ 36
+ DisplayScale
+ 1.000 cm = 1.000 cm
+ GraphicsList
+
+
+ Bounds
+ {{249.5, 218}, {95.645057678222656, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ Font
+ MyriadPro-SemiboldCond
+ Size
+ 12
+
+ ID
+ 19
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0
+ g
+ 0.50706
+ r
+ 0
+
+
+ stroke
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-SemiboldCond;}
+{\colortbl;\red255\green255\blue255;\red0\green255\blue0;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf2 TestDataverseEngine}
+
+
+
+ Bounds
+ {{249.49999863559134, 353.99999999999994}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 24
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 File System}
+
+
+
+ Bounds
+ {{347.49999332427979, 354.00001474307919}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 25
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 Solr}
+
+
+
+ Bounds
+ {{445.49998713703945, 354.00001474307908}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 23
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 JPA/SQL}
+
+
+
+ Bounds
+ {{249.5, 286}, {284, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 6
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0
+ g
+ 0.50706
+ r
+ 0
+
+
+ stroke
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;\red0\green255\blue0;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf2 Mock "Lean" Service Beans}
+
+
+
+ Bounds
+ {{352.01528930664062, 218}, {181.48471069335938, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 5
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.289689
+ g
+ 0.897811
+ r
+ 0.315035
+
+
+ stroke
+
+ CornerRadius
+ 14
+ GapRatio
+ 0.5
+ Width
+ 4
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 Commands}
+
+
+
+ Bounds
+ {{423.5, 150}, {110, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 4
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 API}
+
+
+
+ Bounds
+ {{249.5, 150}, {164, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 3
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 UI}
+
+
+
+ Bounds
+ {{113.75, 225.75}, {137.5, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 13
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Models\
+(DvObjects et al.)}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 177}, {394.5, 136.5}}
+ Class
+ ShapedGraphic
+ ID
+ 12
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 1
+ g
+ 0.8
+ r
+ 0.4
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ GridInfo
+
+ HPages
+ 1
+ KeepToScale
+
+ Layers
+
+
+ Lock
+ NO
+ Name
+ Layers
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Ranges
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Layer 2
+ Print
+ YES
+ View
+ YES
+
+
+ LayoutInfo
+
+ Animate
+ NO
+ circoMinDist
+ 18
+ circoSeparation
+ 0.0
+ layoutEngine
+ dot
+ neatoSeparation
+ 0.0
+ twopiSeparation
+ 0.0
+
+ Orientation
+ 1
+ PrintOnePage
+
+ RowAlign
+ 1
+ RowSpacing
+ 36
+ SheetTitle
+ Testable in UT
+ UniqueID
+ 4
+ VPages
+ 1
+
+
+ ActiveLayerIndex
+ 0
+ AutoAdjust
+
+ BackgroundGraphic
+
+ Bounds
+ {{0, 0}, {783, 559}}
+ Class
+ SolidGraphic
+ ID
+ 2
+ Style
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+ BaseZoom
+ 0
+ CanvasOrigin
+ {0, 0}
+ ColumnAlign
+ 1
+ ColumnSpacing
+ 36
+ DisplayScale
+ 1.000 cm = 1.000 cm
+ ExportShapes
+
+
+ InspectorGroup
+ 255
+ ShapeImageRect
+ {{2, 2}, {22, 22}}
+ ShapeName
+ 126F0C66-5EA2-4018-99ED-8BF8640043EC-74214-0002475ECCB4BDEE
+ ShouldExport
+ YES
+ StrokePath
+
+ elements
+
+
+ element
+ MOVETO
+ point
+ {0.24028908637789304, -0.052845539704162281}
+
+
+ element
+ LINETO
+ point
+ {0.24028908637789304, 0.38617886465196793}
+
+
+ control1
+ {0.24028908637789304, 0.44904054191311271}
+ control2
+ {0.27482650541678622, 0.49999999999999956}
+ element
+ CURVETO
+ point
+ {0.31743057089476068, 0.49999999999999956}
+
+
+ element
+ LINETO
+ point
+ {0.42285845329583394, 0.49999999999999956}
+
+
+ control1
+ {0.46546251877380795, 0.49999999999999956}
+ control2
+ {0.49999993781270158, 0.44904054191311271}
+ element
+ CURVETO
+ point
+ {0.49999993781270158, 0.38617886465196793}
+
+
+ element
+ LINETO
+ point
+ {0.49999993781270158, -0.16652059285842347}
+
+
+ control1
+ {0.49999997927008444, -0.16656927976700198}
+ control2
+ {0.5, -0.16661797383764565}
+ element
+ CURVETO
+ point
+ {0.5, -0.16666667505219346}
+
+
+ element
+ LINETO
+ point
+ {0.5, -0.38617886465196882}
+
+
+ control1
+ {0.5, -0.44904054191311338}
+ control2
+ {0.46546258096110638, -0.50000000000000022}
+ element
+ CURVETO
+ point
+ {0.42285851548313236, -0.50000000000000022}
+
+
+ element
+ LINETO
+ point
+ {-0.42285851548313191, -0.50000000000000022}
+
+
+ control1
+ {-0.46546258096110638, -0.50000000000000022}
+ control2
+ {-0.49999999999999978, -0.44904054191311338}
+ element
+ CURVETO
+ point
+ {-0.49999999999999978, -0.38617886465196882}
+
+
+ element
+ LINETO
+ point
+ {-0.49999999999999978, -0.16666667505219346}
+
+
+ control1
+ {-0.49999999999999978, -0.10380499779104913}
+ control2
+ {-0.46546258096110638, -0.052845539704162281}
+ element
+ CURVETO
+ point
+ {-0.42285851548313191, -0.052845539704162281}
+
+
+ element
+ CLOSE
+
+
+ element
+ MOVETO
+ point
+ {0.24028908637789304, -0.052845539704162281}
+
+
+
+ TextBounds
+ {{0, 0}, {1, 1}}
+
+
+ GraphicsList
+
+
+ Bounds
+ {{408.25762102002579, 231.98663496839191}, {69, 25}}
+ Class
+ ShapedGraphic
+ FitText
+ YES
+ Flow
+ Resize
+ ID
+ 25
+ Layer
+ 0
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 Commands}
+
+ TextPlacement
+ 2
+ Wrap
+ NO
+
+
+ Bounds
+ {{249.5, 218}, {95.645057678222656, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-SemiboldCond
+ Size
+ 12
+
+ ID
+ 19
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.289689
+ g
+ 0.897811
+ r
+ 0.315035
+
+
+ stroke
+
+ CornerRadius
+ 14
+ GapRatio
+ 0.5
+ Width
+ 4
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-SemiboldCond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 EjbDataverseEngine}
+
+
+
+ Bounds
+ {{249.50000549142089, 354}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 9
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.181635
+ g
+ 0.657142
+ r
+ 0.214557
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 File System}
+
+
+
+ Bounds
+ {{347.50002211461873, 354}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 8
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.181635
+ g
+ 0.657142
+ r
+ 0.214557
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 Solr}
+
+
+
+ Bounds
+ {{445.50002463092824, 353.9999876825363}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 7
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.181635
+ g
+ 0.657142
+ r
+ 0.214557
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 JPA/SQL}
+
+
+
+ Bounds
+ {{249.5, 286}, {231.52293395996094, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 6
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.158507
+ g
+ 0.776571
+ r
+ 0.218574
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 "Lean" Service Beans}
+
+
+
+ Bounds
+ {{352.01528930664062, 218}, {181.48471069335938, 123.0000030942594}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 0
+
+ Font
+ MyriadPro-Semibold
+ NSKern
+ 0.0
+ Size
+ 12
+
+ ID
+ 26
+ Layer
+ 0
+ Shape
+ 126F0C66-5EA2-4018-99ED-8BF8640043EC-74214-0002475ECCB4BDEE
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.289689
+ g
+ 0.897811
+ r
+ 0.315035
+
+
+ stroke
+
+ GapRatio
+ 0.5
+ Width
+ 4
+
+
+
+
+ Bounds
+ {{423.5, 150}, {110, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 4
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.4
+ g
+ 1
+ r
+ 0.4
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 API}
+
+
+
+ Bounds
+ {{249.5, 150}, {164, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 3
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.4
+ g
+ 1
+ r
+ 0.4
+
+
+ stroke
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 UI}
+
+
+
+ Bounds
+ {{143.75000154712973, 405.95993521291189}, {65.5, 25}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Color
+
+ b
+ 0
+ g
+ 0
+ r
+ 0
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 22
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Storage}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163.23664118803072, 385.70992356409215}, {394.5, 65}}
+ Class
+ ShapedGraphic
+ ID
+ 21
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.509983
+ g
+ 0.311949
+ r
+ 0.25906
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ Bounds
+ {{145, 116.5}, {75, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 17
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Presentation\
+(html, Json, etc.)}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 98.5}, {394.5, 75}}
+ Class
+ ShapedGraphic
+ ID
+ 16
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 0
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ Bounds
+ {{149.75, 329.75}, {65.5, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 15
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Storage\
+Protocols}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 317}, {394.5, 65}}
+ Class
+ ShapedGraphic
+ ID
+ 14
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.655716
+ g
+ 0.409186
+ r
+ 0.329095
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ Bounds
+ {{113.75, 225.75}, {137.5, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 13
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Models\
+(DvObjects et al.)}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 177}, {394.5, 136.5}}
+ Class
+ ShapedGraphic
+ ID
+ 12
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 1
+ g
+ 0.8
+ r
+ 0.4
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ GridInfo
+
+ HPages
+ 1
+ KeepToScale
+
+ Layers
+
+
+ Lock
+ NO
+ Name
+ Layers
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Ranges
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Layer 2
+ Print
+ YES
+ View
+ YES
+
+
+ LayoutInfo
+
+ Animate
+ NO
+ circoMinDist
+ 18
+ circoSeparation
+ 0.0
+ layoutEngine
+ dot
+ neatoSeparation
+ 0.0
+ twopiSeparation
+ 0.0
+
+ Orientation
+ 1
+ PrintOnePage
+
+ RowAlign
+ 1
+ RowSpacing
+ 36
+ SheetTitle
+ Non-testable in container
+ UniqueID
+ 3
+ VPages
+ 1
+
+
+ ActiveLayerIndex
+ 0
+ AutoAdjust
+
+ BackgroundGraphic
+
+ Bounds
+ {{0, 0}, {783, 559}}
+ Class
+ SolidGraphic
+ ID
+ 2
+ Style
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+ BaseZoom
+ 0
+ CanvasOrigin
+ {0, 0}
+ ColumnAlign
+ 1
+ ColumnSpacing
+ 36
+ DisplayScale
+ 1.000 cm = 1.000 cm
+ ExportShapes
+
+
+ InspectorGroup
+ 255
+ ShapeImageRect
+ {{2, 2}, {22, 22}}
+ ShapeName
+ 126F0C66-5EA2-4018-99ED-8BF8640043EC-74214-0002475ECCB4BDEE
+ ShouldExport
+ YES
+ StrokePath
+
+ elements
+
+
+ element
+ MOVETO
+ point
+ {0.24028908637789304, -0.052845539704162281}
+
+
+ element
+ LINETO
+ point
+ {0.24028908637789304, 0.38617886465196793}
+
+
+ control1
+ {0.24028908637789304, 0.44904054191311271}
+ control2
+ {0.27482650541678622, 0.49999999999999956}
+ element
+ CURVETO
+ point
+ {0.31743057089476068, 0.49999999999999956}
+
+
+ element
+ LINETO
+ point
+ {0.42285845329583394, 0.49999999999999956}
+
+
+ control1
+ {0.46546251877380795, 0.49999999999999956}
+ control2
+ {0.49999993781270158, 0.44904054191311271}
+ element
+ CURVETO
+ point
+ {0.49999993781270158, 0.38617886465196793}
+
+
+ element
+ LINETO
+ point
+ {0.49999993781270158, -0.16652059285842347}
+
+
+ control1
+ {0.49999997927008444, -0.16656927976700198}
+ control2
+ {0.5, -0.16661797383764565}
+ element
+ CURVETO
+ point
+ {0.5, -0.16666667505219346}
+
+
+ element
+ LINETO
+ point
+ {0.5, -0.38617886465196882}
+
+
+ control1
+ {0.5, -0.44904054191311338}
+ control2
+ {0.46546258096110638, -0.50000000000000022}
+ element
+ CURVETO
+ point
+ {0.42285851548313236, -0.50000000000000022}
+
+
+ element
+ LINETO
+ point
+ {-0.42285851548313191, -0.50000000000000022}
+
+
+ control1
+ {-0.46546258096110638, -0.50000000000000022}
+ control2
+ {-0.49999999999999978, -0.44904054191311338}
+ element
+ CURVETO
+ point
+ {-0.49999999999999978, -0.38617886465196882}
+
+
+ element
+ LINETO
+ point
+ {-0.49999999999999978, -0.16666667505219346}
+
+
+ control1
+ {-0.49999999999999978, -0.10380499779104913}
+ control2
+ {-0.46546258096110638, -0.052845539704162281}
+ element
+ CURVETO
+ point
+ {-0.42285851548313191, -0.052845539704162281}
+
+
+ element
+ CLOSE
+
+
+ element
+ MOVETO
+ point
+ {0.24028908637789304, -0.052845539704162281}
+
+
+
+ TextBounds
+ {{0, 0}, {1, 1}}
+
+
+ GraphicsList
+
+
+ Bounds
+ {{473.28246342094639, 331.00001650138393}, {69, 29.30152702331543}}
+ Class
+ ShapedGraphic
+ ID
+ 33
+ Layer
+ 0
+ Shape
+ Bolt
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0
+ g
+ 0
+ r
+ 1
+
+
+ shadow
+
+ Beneath
+ YES
+ Color
+
+ a
+ 0.75
+ b
+ 0
+ g
+ 1
+ r
+ 1
+
+ ShadowVector
+ {0, 0}
+
+ stroke
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 1
+
+
+
+
+
+ Bounds
+ {{249.5, 218}, {95.645057678222656, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ Font
+ MyriadPro-SemiboldCond
+ Size
+ 12
+
+ ID
+ 19
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0
+ g
+ 0.50706
+ r
+ 0
+
+
+ stroke
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-SemiboldCond;}
+{\colortbl;\red255\green255\blue255;\red0\green255\blue0;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf2 TestDataverseEngine}
+
+
+
+ Bounds
+ {{249.49999863559134, 353.99999999999994}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 24
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 File System}
+
+
+
+ Bounds
+ {{347.49999332427979, 354.00001474307919}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 25
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 Solr}
+
+
+
+ Bounds
+ {{445.49998713703945, 354.00001474307908}, {88, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 23
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 JPA/SQL}
+
+
+
+ Bounds
+ {{249.5, 286}, {227.75762102002579, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 6
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0
+ g
+ 0.50706
+ r
+ 0
+
+
+ stroke
+
+ Color
+
+ b
+ 0
+ g
+ 1
+ r
+ 0
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;\red0\green255\blue0;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf2 Mock "Lean" Service Beans}
+
+
+
+ Bounds
+ {{423.5, 150}, {110, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 4
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 API}
+
+
+
+ Bounds
+ {{249.5, 150}, {164, 55}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 1
+
+ Font
+ MyriadPro-Semibold
+ Size
+ 12
+
+ ID
+ 3
+ Layer
+ 0
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.78159
+ g
+ 0.78159
+ r
+ 0.78159
+
+
+ shadow
+
+ ShadowVector
+ {0, 2}
+
+ stroke
+
+ Color
+
+ b
+ 1
+ g
+ 1
+ r
+ 1
+
+ CornerRadius
+ 14
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf1 UI}
+
+
+
+ Bounds
+ {{408.25762102002579, 231.98663496839191}, {69, 25}}
+ Class
+ ShapedGraphic
+ FitText
+ YES
+ Flow
+ Resize
+ ID
+ 32
+ Layer
+ 0
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Semibold;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\b\fs24 \cf0 Commands}
+
+ TextPlacement
+ 2
+ Wrap
+ NO
+
+
+ Bounds
+ {{352.01528930664062, 218}, {181.48471069335938, 123.0000030942594}}
+ Class
+ ShapedGraphic
+ FontInfo
+
+ Color
+
+ w
+ 0
+
+ Font
+ MyriadPro-Semibold
+ NSKern
+ 0.0
+ Size
+ 12
+
+ ID
+ 26
+ Layer
+ 0
+ Shape
+ 126F0C66-5EA2-4018-99ED-8BF8640043EC-74214-0002475ECCB4BDEE
+ Style
+
+ fill
+
+ Color
+
+ b
+ 0.289689
+ g
+ 0.897811
+ r
+ 0.315035
+
+
+ stroke
+
+ GapRatio
+ 0.5
+ Width
+ 4
+
+
+
+
+ Bounds
+ {{113.75, 225.75}, {137.5, 39}}
+ Class
+ ShapedGraphic
+ FitText
+ Vertical
+ Flow
+ Resize
+ FontInfo
+
+ Font
+ MyriadPro-Cond
+ Size
+ 12
+
+ ID
+ 13
+ Layer
+ 1
+ Magnets
+
+ {0, 1}
+ {0, -1}
+ {1, 0}
+ {-1, 0}
+
+ Rotation
+ 270
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Draws
+ NO
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+ Text
+
+ Text
+ {\rtf1\ansi\ansicpg1252\cocoartf1404\cocoasubrtf340
+\cocoascreenfonts1{\fonttbl\f0\fnil\fcharset0 MyriadPro-Cond;}
+{\colortbl;\red255\green255\blue255;}
+\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\pardirnatural\qc
+
+\f0\fs24 \cf0 Models\
+(DvObjects et al.)}
+
+ TextPlacement
+ 2
+
+
+ Bounds
+ {{163, 177}, {394.5, 136.5}}
+ Class
+ ShapedGraphic
+ ID
+ 12
+ Layer
+ 1
+ Shape
+ Rectangle
+ Style
+
+ fill
+
+ Color
+
+ b
+ 1
+ g
+ 0.8
+ r
+ 0.4
+
+
+ shadow
+
+ Draws
+ NO
+
+ stroke
+
+ Draws
+ NO
+
+
+
+
+ GridInfo
+
+ HPages
+ 1
+ KeepToScale
+
+ Layers
+
+
+ Lock
+ NO
+ Name
+ Layers
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Ranges
+ Print
+ YES
+ View
+ YES
+
+
+ Lock
+ NO
+ Name
+ Layer 2
+ Print
+ YES
+ View
+ YES
+
+
+ LayoutInfo
+
+ Animate
+ NO
+ circoMinDist
+ 18
+ circoSeparation
+ 0.0
+ layoutEngine
+ dot
+ neatoSeparation
+ 0.0
+ twopiSeparation
+ 0.0
+
+ Orientation
+ 1
+ PrintOnePage
+
+ RowAlign
+ 1
+ RowSpacing
+ 36
+ SheetTitle
+ Non-Testable in UT 2
+ UniqueID
+ 5
+ VPages
+ 1
+
+
+ SmartAlignmentGuidesActive
+ YES
+ SmartDistanceGuidesActive
+ YES
+ UseEntirePage
+
+ WindowInfo
+
+ CurrentSheet
+ 3
+ ExpandedCanvases
+
+
+ name
+ Testable in Container
+
+
+ name
+ Non-testable in container
+
+
+ name
+ Non-Testable in UT 2
+
+
+ Frame
+ {{2626, 2}, {1160, 875}}
+ ListView
+
+ OutlineWidth
+ 142
+ RightSidebar
+
+ ShowRuler
+
+ Sidebar
+
+ SidebarWidth
+ 120
+ VisibleRegion
+ {{0, 0}, {782.44278226850008, 559.54200917347373}}
+ Zoom
+ 1.309999942779541
+ ZoomValues
+
+
+ Testable in Container
+ 1.309999942779541
+ 1.2899999618530273
+
+
+ Non-testable in container
+ 1.309999942779541
+ 1
+
+
+ Testable in UT
+ 1.309999942779541
+ 1
+
+
+ Non-Testable in UT 2
+ 1.309999942779541
+ 1
+
+
+
+
+
diff --git a/doc/theTestableCommand/non-testable-container.png b/doc/theTestableCommand/non-testable-container.png
new file mode 100644
index 00000000000..0e9f16ba208
Binary files /dev/null and b/doc/theTestableCommand/non-testable-container.png differ
diff --git a/doc/theTestableCommand/non-testable-ut.png b/doc/theTestableCommand/non-testable-ut.png
new file mode 100644
index 00000000000..c4de46d1909
Binary files /dev/null and b/doc/theTestableCommand/non-testable-ut.png differ
diff --git a/doc/theTestableCommand/testable-container.png b/doc/theTestableCommand/testable-container.png
new file mode 100644
index 00000000000..e00569ba88e
Binary files /dev/null and b/doc/theTestableCommand/testable-container.png differ
diff --git a/doc/theTestableCommand/testable-ut.png b/doc/theTestableCommand/testable-ut.png
new file mode 100644
index 00000000000..52def9583eb
Binary files /dev/null and b/doc/theTestableCommand/testable-ut.png differ
diff --git a/local_lib/com/lyncode/xoai-common/4.1.0-header-patch/xoai-common-4.1.0-header-patch.jar b/local_lib/com/lyncode/xoai-common/4.1.0-header-patch/xoai-common-4.1.0-header-patch.jar
new file mode 100644
index 00000000000..a23530b895c
Binary files /dev/null and b/local_lib/com/lyncode/xoai-common/4.1.0-header-patch/xoai-common-4.1.0-header-patch.jar differ
diff --git a/local_lib/com/lyncode/xoai-common/4.1.0-header-patch/xoai-common-4.1.0-header-patch.pom b/local_lib/com/lyncode/xoai-common/4.1.0-header-patch/xoai-common-4.1.0-header-patch.pom
new file mode 100644
index 00000000000..2915745c27d
--- /dev/null
+++ b/local_lib/com/lyncode/xoai-common/4.1.0-header-patch/xoai-common-4.1.0-header-patch.pom
@@ -0,0 +1,77 @@
+
+
+
+ xoai
+ com.lyncode
+ 4.1.0
+
+ 4.0.0
+
+ XOAI Commons
+ xoai-common
+ 4.1.0-header-patch
+
+
+
+ com.lyncode
+ xml-io
+
+
+ com.lyncode
+ test-support
+
+
+
+ commons-codec
+ commons-codec
+
+
+
+ commons-io
+ commons-io
+
+
+
+ com.google.guava
+ guava
+
+
+
+ xml-apis
+ xml-apis
+
+
+
+ org.hamcrest
+ hamcrest-all
+
+
+
+ org.codehaus.woodstox
+ stax2-api
+
+
+
+ javax.xml.stream
+ stax-api
+
+
+
+ org.apache.commons
+ commons-lang3
+
+
+
+ stax
+ stax-api
+
+
+
+ junit
+ junit
+ test
+
+
+
+
+
diff --git a/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch-javadoc.jar b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch-javadoc.jar
new file mode 100644
index 00000000000..28e5da7b0d6
Binary files /dev/null and b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch-javadoc.jar differ
diff --git a/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch-sources.jar b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch-sources.jar
new file mode 100644
index 00000000000..bdec990e2c6
Binary files /dev/null and b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch-sources.jar differ
diff --git a/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch.jar b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch.jar
new file mode 100644
index 00000000000..331c9a80cd1
Binary files /dev/null and b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch.jar differ
diff --git a/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch.pom b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch.pom
new file mode 100644
index 00000000000..87d67b8c4a7
--- /dev/null
+++ b/local_lib/com/lyncode/xoai-data-provider/4.1.0-header-patch/xoai-data-provider-4.1.0-header-patch.pom
@@ -0,0 +1,54 @@
+
+
+
+ xoai
+ com.lyncode
+ 4.1.0-header-patch
+
+
+ 4.0.0
+
+ XOAI Data Provider
+ xoai-data-provider
+ 4.1.0-header-patch
+
+
+
+ com.lyncode
+ xoai-common
+ ${project.version}
+
+
+
+ log4j
+ log4j
+
+
+
+ com.google.guava
+ guava
+
+
+
+ com.lyncode
+ builder-commons
+
+
+
+ org.apache.commons
+ commons-lang3
+
+
+
+ org.mockito
+ mockito-all
+ test
+
+
+
+ junit
+ junit
+ test
+
+
+
diff --git a/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch-javadoc.jar b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch-javadoc.jar
new file mode 100644
index 00000000000..4382b3ded5d
Binary files /dev/null and b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch-javadoc.jar differ
diff --git a/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch-sources.jar b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch-sources.jar
new file mode 100644
index 00000000000..314dad81872
Binary files /dev/null and b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch-sources.jar differ
diff --git a/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch.jar b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch.jar
new file mode 100644
index 00000000000..781fc1ce1e2
Binary files /dev/null and b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch.jar differ
diff --git a/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch.pom b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch.pom
new file mode 100644
index 00000000000..c45e15a91f9
--- /dev/null
+++ b/local_lib/com/lyncode/xoai-service-provider/4.1.0-header-patch/xoai-service-provider-4.1.0-header-patch.pom
@@ -0,0 +1,67 @@
+
+
+
+ xoai
+ com.lyncode
+ 4.1.0-header-patch
+
+ 4.0.0
+
+ XOAI Service Provider
+ xoai-service-provider
+ 4.1.0-header-patch
+
+
+
+ com.lyncode
+ xoai-common
+ ${project.version}
+
+
+
+ com.lyncode
+ xml-io
+
+
+
+ log4j
+ log4j
+
+
+
+ org.apache.commons
+ commons-lang3
+
+
+
+ org.apache.httpcomponents
+ httpclient
+
+
+
+ org.codehaus.woodstox
+ wstx-asl
+
+
+
+
+ com.lyncode
+ xoai-data-provider
+ ${project.version}
+ test
+
+
+
+ org.mockito
+ mockito-all
+ test
+
+
+
+ junit
+ junit
+ test
+
+
+
+
diff --git a/local_lib/com/lyncode/xoai/4.1.0-header-patch/xoai-4.1.0-header-patch.pom b/local_lib/com/lyncode/xoai/4.1.0-header-patch/xoai-4.1.0-header-patch.pom
new file mode 100644
index 00000000000..9e0d802244c
--- /dev/null
+++ b/local_lib/com/lyncode/xoai/4.1.0-header-patch/xoai-4.1.0-header-patch.pom
@@ -0,0 +1,273 @@
+
+ 4.0.0
+ pom
+
+
+ xoai-common
+ xoai-data-provider
+ xoai-service-provider
+
+
+
+ org.sonatype.oss
+ oss-parent
+ 7
+
+
+ com.lyncode
+ xoai
+ 4.1.0-header-patch
+
+ XOAI : OAI-PMH Java Toolkit
+ http://www.lyncode.com
+
+
+ 1.9.5
+ 15.0
+ 3.1
+ 1.2.14
+ 4.2.1
+ 4.0.0
+
+ 1.0.2
+ 1.0.3
+ 1.0.4
+
+
+
+
+ The Apache Software License, Version 2.0
+ http://www.apache.org/licenses/LICENSE-2.0.txt
+ repo
+
+
+
+
+ scm:git:git@github.com:lyncode/xoai.git
+ scm:git:git@github.com:lyncode/xoai.git
+ git@github.com:lyncode/xoai.git
+ xoai-4.1.0
+
+
+
+
+ ossrh
+ https://oss.sonatype.org/content/repositories/snapshots
+
+
+ ossrh
+ https://oss.sonatype.org/service/local/staging/deploy/maven2/
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-gpg-plugin
+ 1.5
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ 2.8.1
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 2.2.1
+
+
+ org.apache.maven.plugins
+ maven-release-plugin
+ 2.5
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.1
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-release-plugin
+
+ true
+ false
+ release
+ deploy
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+ 1.6
+ 1.6
+ false
+ false
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ true
+
+
+ attach-javadocs
+
+ jar
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ true
+
+
+ attach-sources
+
+ jar
+
+
+
+
+
+
+
+
+
+
+ com.lyncode
+ xml-io
+ ${lyncode.xml-io}
+
+
+
+ com.lyncode
+ test-support
+ ${lyncode.test-support}
+
+
+
+
+ log4j
+ log4j
+ ${log4j.version}
+
+
+
+ org.apache.commons
+ commons-lang3
+ ${commons.lang3.version}
+
+
+
+ org.apache.httpcomponents
+ httpclient
+ ${http-commons.version}
+
+
+
+ org.codehaus.woodstox
+ wstx-asl
+ ${woodstox.version}
+
+
+
+ org.codehaus.woodstox
+ stax2-api
+ 3.0.4
+
+
+
+ commons-codec
+ commons-codec
+ 1.3
+
+
+ org.hamcrest
+ hamcrest-all
+ 1.3
+
+
+ xalan
+ xalan
+ 2.7.0
+
+
+ dom4j
+ dom4j
+ 1.6.1
+
+
+
+ javax.xml.stream
+ stax-api
+ 1.0-2
+
+
+ jaxen
+ jaxen
+ 1.1.4
+
+
+ junit
+ junit
+ 4.11
+
+
+ commons-io
+ commons-io
+ 2.4
+
+
+
+ xml-apis
+ xml-apis
+ 1.0.b2
+
+
+
+ stax
+ stax-api
+ 1.0.1
+
+
+
+ org.mockito
+ mockito-all
+ ${mockito.version}
+
+
+
+ com.google.guava
+ guava
+ ${guava.version}
+
+
+
+ com.lyncode
+ builder-commons
+ ${lyncode.builder-commons}
+
+
+
+
+
+
+
+ DSpace @ Lyncode
+ dspace@lyncode.com
+ Lyncode
+ http://www.lyncode.com
+
+
+
+
diff --git a/pom.xml b/pom.xml
index b3f59ca81d4..56edef4f9d8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
edu.harvard.iq
dataverse
- 4.4
+ 4.5
war
dataverse
@@ -374,7 +374,35 @@
log4j
1.2.17
-
+
+
+
+
+
+
+
+ com.lyncode
+ xoai-common
+ 4.1.0-header-patch
+
+
+ com.lyncode
+ xoai-data-provider
+ 4.1.0-header-patch
+
+
+ com.lyncode
+ xoai-service-provider
+ 4.1.0-header-patch
+
+
+
+ com.google.auto.service
+ auto-service
+ 1.0-rc2
+ true
+ jar
+
@@ -427,6 +455,7 @@
maven-war-plugin
2.3
+ true
false
diff --git a/scripts/database/homebrew/rebuild-and-test b/scripts/database/homebrew/rebuild-and-test
index 70278083ee8..670fb847493 100755
--- a/scripts/database/homebrew/rebuild-and-test
+++ b/scripts/database/homebrew/rebuild-and-test
@@ -1,5 +1,7 @@
#!/bin/sh
scripts/database/homebrew/run-post-create-post-deploy
+echo "Publishing root dataverse"
+scripts/search/tests/publish-dataverse-root
echo "---"
echo "Creating search users"
scripts/search/populate-users > /dev/null
diff --git a/scripts/database/reference_data.sql b/scripts/database/reference_data.sql
index 4f22772b5c5..2e71f0b4e56 100644
--- a/scripts/database/reference_data.sql
+++ b/scripts/database/reference_data.sql
@@ -17,6 +17,8 @@ INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname
INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (15, 'affiliation', 'authorAffiliation', TRUE, 3, 1 );
INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (16, ':contributor', 'contributorName', FALSE, NULL, 1 );
INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (17, 'type', 'contributorType', TRUE, 16, 1 );
+INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 );
+INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 );
INSERT INTO guestbook(
emailrequired, enabled, institutionrequired, createtime,
diff --git a/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql b/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql
new file mode 100644
index 00000000000..6f9a0ca73ef
--- /dev/null
+++ b/scripts/database/upgrades/upgrade_v4.4_to_v4.5.sql
@@ -0,0 +1,11 @@
+-- A Private URL is a specialized role assignment with a token.
+ALTER TABLE roleassignment ADD COLUMN privateurltoken character varying(255);
+-- "Last Export Time" added to the dataset:
+ALTER TABLE dataset ADD COLUMN lastExportTime TIMESTAMP;
+-- Direct link to the harvesting configuration, for harvested datasets:
+ALTER TABLE dataset ADD COLUMN harvestingClient_id bigint;
+-- For harvested datasets, the native OAI identifier used by the original OAI server:
+ALTER TABLE dataset ADD COLUMN harvestIdentifier VARCHAR(255);
+-- Add extra rules to the Dublin Core import logic:
+INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 );
+INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 );
diff --git a/scripts/deploy/phoenix.dataverse.org/dv-root.json b/scripts/deploy/phoenix.dataverse.org/dv-root.json
index aea377d88f0..20fa8905da5 100644
--- a/scripts/deploy/phoenix.dataverse.org/dv-root.json
+++ b/scripts/deploy/phoenix.dataverse.org/dv-root.json
@@ -1,6 +1,6 @@
{
"alias": "root",
- "name": "Phoenix",
+ "name": "Root",
"permissionRoot": false,
"facetRoot": true,
"description": "Welcome! phoenix.dataverse.org is so named because data here is deleted on every build of the latest Dataverse code: http://guides.dataverse.org/en/latest/developers ",
diff --git a/scripts/installer/glassfish-setup.sh b/scripts/installer/glassfish-setup.sh
index 4d195f077e9..397cebfc7e7 100755
--- a/scripts/installer/glassfish-setup.sh
+++ b/scripts/installer/glassfish-setup.sh
@@ -215,6 +215,8 @@ fi
./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.password=apitest"
./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=apitest"
./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=https\://ezid.cdlib.org"
+# "I am the timer server" option:
+./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true"
# enable comet support
./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true"
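The -Ddataverse.timerServer option set above is a plain JVM system property, so the application presumably reads it at startup to decide whether this instance owns the EJB timers. A minimal sketch of that check, assuming System.getProperty is the lookup mechanism (the class and method names here are hypothetical, not part of the patch):

    // Sketch only: shows how the -Ddataverse.timerServer JVM option set by
    // glassfish-setup.sh could be read; this is not the actual Dataverse code.
    public class TimerServerCheck {
        public static boolean isTimerServer() {
            // Defaults to false when the option is absent.
            return "true".equalsIgnoreCase(System.getProperty("dataverse.timerServer"));
        }

        public static void main(String[] args) {
            System.out.println("This instance owns the timers: " + isTimerServer());
        }
    }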
diff --git a/scripts/installer/install b/scripts/installer/install
index 0475c37171f..071b8e7558b 100755
--- a/scripts/installer/install
+++ b/scripts/installer/install
@@ -452,7 +452,7 @@ chomp( $cwd = `pwd` );
# 2b. CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE
-my $SQL_REFERENCE_DATA = "reference_data_filtered.sql";
+#my $SQL_REFERENCE_DATA = "reference_data_filtered.sql";
my $SQL_REFERENCE_TEMPLATE = "../database/reference_data.sql";
unless ( -f $SQL_REFERENCE_TEMPLATE ) {
@@ -466,16 +466,16 @@ unless ( -f $SQL_REFERENCE_TEMPLATE ) {
exit 0;
}
-open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@;
-open SQLDATAOUT, '>' . $SQL_REFERENCE_DATA || die $@;
+#open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@;
+#open SQLDATAOUT, '>' . $SQL_REFERENCE_DATA || die $@;
+#
+#while (<DATATEMPLATEIN>) {
+# s/dvnapp/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g;
+# print SQLDATAOUT $_;
+#}
-while (<DATATEMPLATEIN>) {
- s/dvnapp/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g;
- print SQLDATAOUT $_;
-}
-
-close DATATEMPLATEIN;
-close SQLDATAOUT;
+#close DATATEMPLATEIN;
+#close SQLDATAOUT;
# 3. CHECK POSTGRES AND JQ AVAILABILITY:
@@ -1082,7 +1082,7 @@ unless ((
print "\nPre-populating the database:\n\n";
-my $psql_command = $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_DATA";
+my $psql_command = $psql_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_TEMPLATE";
unless ( ( my $exitcode = system("$psql_command") ) == 0 )
{
@@ -1093,7 +1093,7 @@ unless ( ( my $exitcode = system("$psql_command") ) == 0 )
print "new Dataverse to be available. \n";
print "\n";
print "You can try this again, by executing the following on the command line:\n";
- print " psql -U $CONFIG_DEFAULTS{'POSTGRES_USER'} -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_DATA\n";
+ print " psql -U $CONFIG_DEFAULTS{'POSTGRES_USER'} -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $SQL_REFERENCE_TEMPLATE\n";
print "then re-start glassfish with \n\n";
print " " . $glassfish_dir . "/bin/asadmin stop-domain domain1\n\n";
print " " . $glassfish_dir . "/bin/asadmin start-domain domain1\n\n";
diff --git a/scripts/migration/migration_presteps.txt b/scripts/migration/migration_presteps.txt
index 517a967db78..df2be493449 100644
--- a/scripts/migration/migration_presteps.txt
+++ b/scripts/migration/migration_presteps.txt
@@ -127,13 +127,13 @@ psql -h localhost -U postgres <4.0 database name> -f /tmp/dvn3_data.sql
----------------------------------------------
-- offsets
-update _dvn3_vdcnetwork set id = id + (select max(id) from dvobject);
-update _dvn3_vdc set id = id + (select max(id) from _dvn3_vdcnetwork);
-update _dvn3_vdcrole set vdc_id = vdc_id + (select max(id) from _dvn3_vdcnetwork);
-update _dvn3_vdc_usergroup set vdcs_id = vdcs_id + (select max(id) from _dvn3_vdcnetwork);
-update _dvn3_vdc_linked_collections set vdc_id = vdc_id + (select max(id) from _dvn3_vdcnetwork);
-update _dvn3_study set owner_id = owner_id + (select max(id) from _dvn3_vdcnetwork);
-update _dvn3_vdccollection set owner_id = owner_id + (select max(id) from _dvn3_vdcnetwork);
+update _dvn3_vdcnetwork set id = id + (select coalesce(max(id), 0) from dvobject);
+update _dvn3_vdc set id = id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork);
+update _dvn3_vdcrole set vdc_id = vdc_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork);
+update _dvn3_vdc_usergroup set vdcs_id = vdcs_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork);
+update _dvn3_vdc_linked_collections set vdc_id = vdc_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork);
+update _dvn3_study set owner_id = owner_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork);
+update _dvn3_vdccollection set owner_id = owner_id + (select coalesce(max(id), 0) from _dvn3_vdcnetwork);
-- note: need to determine what offset to use, based on the file scripts
--update _dvn3_studyfile_vdcuser set studyfiles_id = studyfiles_id +100000;
diff --git a/scripts/rapache/build.sh b/scripts/rapache/build.sh
new file mode 100755
index 00000000000..fc4823775d5
--- /dev/null
+++ b/scripts/rapache/build.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+mkdir -p ~/rpmbuild/SOURCES
+mkdir -p ~/rpmbuild/SPECS
+wget https://github.com/jeffreyhorner/rapache/archive/v1.2.7.tar.gz -O rapache-1.2.7.tar.gz
+tar xzvf rapache-1.2.7.tar.gz rapache-1.2.7/rpm/rapache.spec --strip-components 2
+# Move to build dirs
+cp -f rapache-1.2.7.tar.gz ~/rpmbuild/SOURCES/
+cp -f rapache.spec ~/rpmbuild/SPECS/
+cd ~
+rpmbuild -ba ~/rpmbuild/SPECS/rapache.spec
diff --git a/scripts/search/tests/grant-authusers-add-on-root b/scripts/search/tests/grant-authusers-add-on-root
index 9b57a820aff..08b245fa561 100755
--- a/scripts/search/tests/grant-authusers-add-on-root
+++ b/scripts/search/tests/grant-authusers-add-on-root
@@ -1,5 +1,5 @@
#!/bin/sh
. scripts/search/export-keys
-OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"dvContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"`
+OUTPUT=`curl -s -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"fullContributor\"}" "http://localhost:8080/api/dataverses/root/assignments?key=$ADMINKEY"`
echo $OUTPUT
echo $OUTPUT | jq ' .data | {assignee,_roleAlias}'
diff --git a/scripts/vagrant/rpmbuild.sh b/scripts/vagrant/rpmbuild.sh
new file mode 100755
index 00000000000..f10830afb5b
--- /dev/null
+++ b/scripts/vagrant/rpmbuild.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm
+yum install -y rpm-build httpd-devel libapreq2-devel R-devel
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 8a310f95d1d..bc39855adc6 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -67,9 +67,11 @@ header.guides.user=User Guide
header.guides.developer=Developer Guide
header.guides.installation=Installation Guide
header.guides.api=API Guide
+header.guides.admin=Admin Guide
header.signUp=Sign Up
header.logOut=Log Out
header.accountInfo=Account Information
+header.dashboard=Dashboard
header.user.selectTab.dataRelated=My Data
header.user.selectTab.notifications=Notifications
header.user.selectTab.accountInfo=Account Information
@@ -223,6 +225,187 @@ apitoken.notFound=API Token for {0} has not been created.
apitoken.generateBtn=Create Token
apitoken.regenerateBtn=Recreate Token
+#dashboard.xhtml
+dashboard.title=Dashboard
+dashboard.card.harvestingclients.header=Harvesting Clients
+dashboard.card.harvestingclients.btn.manage=Manage Clients
+dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients}
+dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets}
+dashboard.card.harvestingserver.header=Harvesting Server
+dashboard.card.harvestingserver.enabled=OAI server enabled
+dashboard.card.harvestingserver.disabled=OAI server disabled
+dashboard.card.harvestingserver.status=Status
+dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets}
+dashboard.card.harvestingserver.btn.manage=Manage Server
+dashboard.card.metadataexport.header=Metadata Export
+dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}.
+
+#harvestclients.xhtml
+harvestclients.title=Manage Harvesting Clients
+harvestclients.toptip= - Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API.
+
+harvestclients.noClients.label=No clients are configured.
+harvestclients.noClients.why.header=What is Harvesting?
+harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol.
+harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation.
+harvestclients.noClients.how.header=How To Use Harvesting
+harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets.
+
+harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API.
+harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide.
+
+harvestclients.btn.add=Add Client
+harvestclients.tab.header.name=Nickname
+harvestclients.tab.header.url=URL
+harvestclients.tab.header.lastrun=Last Run
+harvestclients.tab.header.lastresults=Last Results
+harvestclients.tab.header.action=Actions
+harvestclients.tab.header.action.btn.run=Run Harvesting
+harvestclients.tab.header.action.btn.edit=Edit
+harvestclients.tab.header.action.btn.delete=Delete
+harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client
+harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server.
+harvestclients.tab.header.action.btn.delete.dialog.tip=Note that this action may take a while to process, depending on the number of harvested datasets.
+harvestclients.tab.header.action.delete.infomessage=The harvesting client is being deleted. Note that this may take a while, depending on the amount of harvested content.
+
+harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}". Please reload the page to check on the harvest results.
+
+harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information
+harvestclients.newClientDialog.title.new=Create Harvesting Client
+harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server.
+harvestclients.newClientDialog.nickname=Nickname
+harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-).
+harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty!
+harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-), and must be at most 30 characters long.
+harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used.
+
+harvestclients.newClientDialog.type=Server Protocol
+harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported.
+harvestclients.newClientDialog.type.OAI=OAI
+harvestclients.newClientDialog.type.Nesstar=Nesstar
+
+harvestclients.newClientDialog.url=Server URL
+harvestclients.newClientDialog.url.tip=URL of a harvesting resource.
+harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://...
+harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource. Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities.
+harvestclients.newClientDialog.url.required=A valid harvesting server address is required.
+harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response.
+harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server.
+harvestclients.newClientDialog.url.badresponse=Invalid response from the server.
+
+harvestclients.newClientDialog.dataverse=Local Dataverse
+harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource.
+harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias
+harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias
+harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found
+harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client.
+
+harvestclients.newClientDialog.step2=Step 2 of 4 - Format
+harvestclients.newClientDialog.oaiSets=OAI Set
+harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server.
+harvestclients.newClientDialog.oaiSets.noset=None
+harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets.
+harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested.
+
+harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format
+harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server.
+harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive.
+
+harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule
+harvestclients.newClientDialog.schedule=Schedule
+harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly.
+harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only.
+harvestclients.newClientDialog.schedule.none=None
+harvestclients.newClientDialog.schedule.daily=Daily
+harvestclients.newClientDialog.schedule.weekly=Weekly
+harvestclients.newClientDialog.schedule.time=Time
+harvestclients.newClientDialog.schedule.day=Day
+harvestclients.newClientDialog.schedule.time.am=AM
+harvestclients.newClientDialog.schedule.time.pm=PM
+harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time.
+harvestclients.newClientDialog.btn.create=Create Client
+harvestclients.newClientDialog.success=Successfully created harvesting client "{0}".
+
+harvestclients.newClientDialog.step4=Step 4 of 4 - Display
+harvestclients.newClientDialog.harvestingStyle=Archive Type
+harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive.
+harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data.
+
+harvestclients.viewEditDialog.title=Edit Harvesting Client
+harvestclients.viewEditDialog.archiveUrl=Archive URL
+harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content.
+harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL.
+harvestclients.viewEditDialog.archiveDescription=Archive Description
+harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results.
+harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data.
+
+harvestclients.viewEditDialog.btn.save=Save Changes
+harvestclients.newClientDialog.title.edit=Edit Group {0}
+
+#harvestset.xhtml
+harvestserver.title=Manage Harvesting Server
+harvestserver.toptip= - Define sets of local datasets that will be available for harvesting by remote clients.
+harvestserver.service.label=OAI Server
+harvestserver.service.enabled=Enabled
+harvestserver.service.disabled=Disabled
+harvestserver.service.disabled.msg=Harvesting Server is currently disabled.
+harvestserver.service.empty=No sets are configured.
+harvestserver.service.enable.success=OAI Service has been successfully enabled.
+
+harvestserver.noSets.why.header=What is a Harvesting Server?
+harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support the OAI-PMH harvesting protocol.
+harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that it is only the metadata that are harvested. Remote harvesters will generally not attempt to download the data files themselves.
+
+harvestserver.noSets.how.header=How to run a Harvesting Server?
+harvestserver.noSets.how.tip1=The harvesting server can be enabled or disabled on this page.
+harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. Sets are defined by search queries (for example, authorName:king; or parentId:1234, to select all the datasets that belong to the specified dataverse; or dsPersistentId:"doi:1234/", to select all the datasets with the specified persistent identifier authority). Consult the Search API section of the Dataverse User Guide for more information on the search queries.
+
+harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. To learn more about Harvesting, visit the Harvesting section of the User Guide.
+
+harvestserver.btn.add=Add Set
+harvestserver.tab.header.spec=OAI setSpec
+harvestserver.tab.header.description=Description
+harvestserver.tab.header.definition=Definition Query
+harvestserver.tab.header.stats=Datasets
+harvestserver.tab.col.stats.empty=No records (empty set)
+harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted)
+harvestserver.tab.header.action=Actions
+harvestserver.tab.header.action.btn.export=Run Export
+harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress).
+harvestserver.tab.header.action.btn.edit=Edit
+harvestserver.tab.header.action.btn.delete=Delete
+harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set
+harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete!
+harvestserver.tab.header.action.delete.infomessage=The selected harvesting set is being deleted (this may take a few moments).
+
+harvestserver.newSetDialog.title.new=Create Harvesting Set
+harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting by remote clients.
+
+harvestserver.newSetDialog.setspec=Name/OAI setSpec
+harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set.
+harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-).
+harvestserver.editSetDialog.setspec.helptext=The name cannot be changed once the set has been created.
+harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty!
+harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-).
+harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used.
+
+harvestserver.newSetDialog.setdescription=Description
+harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set.
+harvestserver.newSetDialog.setdescription.required=Set description cannot be empty!
+
+harvestserver.newSetDialog.setquery=Definition Query
+harvestserver.newSetDialog.setquery.tip=Search query that defines the content of the set.
+harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king
+harvestserver.newSetDialog.setquery.required=Search query cannot be left empty!
+harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets!
+harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results!
+
+harvestserver.newSetDialog.btn.create=Create Set
+harvestserver.newSetDialog.success=Successfully created harvesting set "{0}".
+
+harvestserver.viewEditDialog.title=Edit Harvesting Set
+harvestserver.viewEditDialog.btn.save=Save Changes
+
#MailServiceBean.java
notification.email.create.dataverse.subject=Dataverse: Your dataverse has been created
@@ -794,9 +977,14 @@ dataset.editBtn.itemLabel.metadata=Metadata
dataset.editBtn.itemLabel.terms=Terms
dataset.editBtn.itemLabel.permissions=Permissions
dataset.editBtn.itemLabel.widgets=Widgets
+dataset.editBtn.itemLabel.privateUrl=Private URL
dataset.editBtn.itemLabel.deleteDataset=Delete Dataset
dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version
dataset.editBtn.itemLabel.deaccession=Deaccession Dataset
+dataset.exportBtn=Export Metadata
+dataset.exportBtn.itemLabel.ddi=DDI
+dataset.exportBtn.itemLabel.dublinCore=Dublin Core
+dataset.exportBtn.itemLabel.json=JSON
metrics.title=Metrics
metrics.comingsoon=Coming soon...
metrics.views=Views
@@ -906,6 +1094,20 @@ dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be
dataset.downloadUnrestricted=Click Continue to download the files you have access to download.
dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button.
+dataset.privateurl.infoMessageAuthor=Unpublished Dataset Private URL - Privately share this dataset before it is published: {0}
+dataset.privateurl.infoMessageReviewer=Unpublished Dataset Private URL - This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account.
+dataset.privateurl.header=Unpublished Dataset Private URL
+dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide.
+dataset.privateurl.absent=Private URL has not been created.
+dataset.privateurl.createPrivateUrl=Create Private URL
+dataset.privateurl.disablePrivateUrl=Disable Private URL
+dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL
+dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset.
+dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets.
+dataset.privateurl.roleassigeeTitle=Private URL Enabled
+dataset.privateurl.createdSuccess=Success!
+dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset.
+dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}.
file.count={0} {0, choice, 0#Files|1#File|2#Files}
file.count.selected={0} {0, choice, 0#Files Selected|1#File Selected|2#Files Selected}
file.selectToAddBtn=Select Files to Add
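The pluralized labels above (for example dashboard.card.harvestingclients.clients and file.count) use java.text.MessageFormat choice patterns. A minimal, self-contained sketch of how such a pattern renders for different counts; the pattern string is copied from the file.count key, and loading it from the real resource bundle is omitted here:

    import java.text.MessageFormat;

    // Sketch: demonstrates the {0, choice, ...} pluralization used in Bundle.properties.
    public class ChoiceFormatDemo {
        public static void main(String[] args) {
            String pattern = "{0} {0, choice, 0#Files|1#File|2#Files}";
            for (int n : new int[] {0, 1, 2, 5}) {
                // Prints "0 Files", "1 File", "2 Files", "5 Files"
                System.out.println(MessageFormat.format(pattern, n));
            }
        }
    }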
diff --git a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
new file mode 100644
index 00000000000..e97f5a9ecda
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
@@ -0,0 +1,191 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
+import edu.harvard.iq.dataverse.harvest.server.OAISet;
+import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
+import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.util.List;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.faces.application.FacesMessage;
+import javax.faces.context.FacesContext;
+import javax.faces.view.ViewScoped;
+import javax.inject.Inject;
+import javax.inject.Named;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@ViewScoped
+@Named
+public class DashboardPage implements java.io.Serializable {
+
+ private static final Logger logger = Logger.getLogger(DashboardPage.class.getCanonicalName());
+
+ @EJB
+ HarvestingClientServiceBean harvestingClientService;
+ @EJB
+ OAISetServiceBean oaiSetService;
+ @EJB
+ SystemConfig systemConfig;
+
+ @Inject
+ DataverseSession session;
+ @Inject
+ NavigationWrapper navigationWrapper;
+
+ /*
+ in breadcrumbs the dashboard page always appears as if it belongs to the
+ root dataverse ("Root Dataverse -> Dashboard") - because it is for the
+ top-level, site-wide controls only available to the site admin.
+ but it should still be possible to pass the id of the dataverse that was
+ current when the admin chose to go to the dashboard. This way certain values
+ can be pre-selected, etc. -- L.A. 4.5
+ */
+ private Dataverse dataverse;
+ private Long dataverseId = null;
+
+ public String init() {
+ if (!isSessionUserAuthenticated()) {
+ return "/loginpage.xhtml" + navigationWrapper.getRedirectPage();
+ } else if (!isSuperUser()) {
+ return navigationWrapper.notAuthorized();
+ }
+
+ /*
+ use this to add some kind of a tooltip/info message to the top of the page:
+ FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, JH.localize("dashboard.title"), JH.localize("dashboard.toptip")));
+ - the values for "dashboard.title" and "dashboard.toptip" would need to be added to the resource bundle.
+ */
+ return null;
+ }
+
+ public Dataverse getDataverse() {
+ return dataverse;
+ }
+
+ public void setDataverse(Dataverse dataverse) {
+ this.dataverse = dataverse;
+ }
+
+ public Long getDataverseId() {
+ return dataverseId;
+ }
+
+ public void setDataverseId(Long dataverseId) {
+ this.dataverseId = dataverseId;
+ }
+
+ public int getNumberOfConfiguredHarvestClients() {
+ List<HarvestingClient> configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+ if (configuredHarvestingClients == null || configuredHarvestingClients.isEmpty()) {
+ return 0;
+ }
+
+ return configuredHarvestingClients.size();
+ }
+
+ public long getNumberOfHarvestedDatasets() {
+ List<HarvestingClient> configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+ if (configuredHarvestingClients == null || configuredHarvestingClients.isEmpty()) {
+ return 0L;
+ }
+
+ Long numOfDatasets = harvestingClientService.getNumberOfHarvestedDatasetByClients(configuredHarvestingClients);
+
+ if (numOfDatasets != null && numOfDatasets > 0L) {
+ return numOfDatasets;
+ }
+
+ return 0L;
+ }
+
+ public boolean isHarvestServerEnabled() {
+ if (systemConfig.isOAIServerEnabled()) {
+ return true;
+ }
+ return false;
+ }
+
+ public int getNumberOfOaiSets() {
+ List<OAISet> configuredHarvestingSets = oaiSetService.findAll();
+ if (configuredHarvestingSets == null || configuredHarvestingSets.isEmpty()) {
+ return 0;
+ }
+
+ return configuredHarvestingSets.size();
+ }
+
+ @Deprecated
+ public String getHarvestClientsInfoLabel() {
+ List<HarvestingClient> configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+ if (configuredHarvestingClients == null || configuredHarvestingClients.isEmpty()) {
+ return JH.localize("harvestclients.noClients.label");
+ }
+
+ String infoLabel;
+
+ if (configuredHarvestingClients.size() == 1) {
+ infoLabel = configuredHarvestingClients.size() + " configured harvesting client; ";
+ } else {
+ infoLabel = configuredHarvestingClients.size() + " harvesting clients configured; ";
+ }
+
+ Long numOfDatasets = harvestingClientService.getNumberOfHarvestedDatasetByClients(configuredHarvestingClients);
+
+ if (numOfDatasets != null && numOfDatasets > 0L) {
+ return infoLabel + numOfDatasets + " harvested datasets";
+ }
+ return infoLabel + "no datasets harvested.";
+ }
+
+ @Deprecated
+ public String getHarvestServerInfoLabel() {
+ if (!systemConfig.isOAIServerEnabled()) {
+ return "OAI server disabled.";
+ }
+
+ String infoLabel = "OAI server enabled; ";
+
+ List<OAISet> configuredHarvestingSets = oaiSetService.findAll();
+ if (configuredHarvestingSets == null || configuredHarvestingSets.isEmpty()) {
+ infoLabel = infoLabel.concat(JH.localize("harvestserver.service.empty"));
+ return infoLabel;
+ }
+
+ infoLabel = infoLabel.concat(configuredHarvestingSets.size() + " configured OAI sets. ");
+ return infoLabel;
+ }
+
+ public boolean isSessionUserAuthenticated() {
+
+ if (session == null) {
+ return false;
+ }
+
+ if (session.getUser() == null) {
+ return false;
+ }
+
+ if (session.getUser().isAuthenticated()) {
+ return true;
+ }
+
+ return false;
+ }
+
+ public boolean isSuperUser() {
+ return session.getUser().isSuperuser();
+ }
+
+}
+
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index def85232b22..3e6bba7e434 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -5,6 +5,7 @@
*/
package edu.harvard.iq.dataverse;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -57,6 +58,8 @@ public DataCitation(DatasetVersion dsv) {
year = sdf.format(sdf.parse(dsv.getDistributionDate()));
} catch (ParseException ex) {
// ignore
+ } catch (Exception ex) {
+ // ignore
}
}
@@ -67,11 +70,16 @@ public DataCitation(DatasetVersion dsv) {
// It is always part of the citation for the local datasets;
// And for *some* harvested datasets.
if (!dsv.getDataset().isHarvested()
- || HarvestingDataverseConfig.HARVEST_STYLE_VDC.equals(dsv.getDataset().getOwner().getHarvestingDataverseConfig().getHarvestStyle())
- || HarvestingDataverseConfig.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getOwner().getHarvestingDataverseConfig().getHarvestStyle())
- || HarvestingDataverseConfig.HARVEST_STYLE_DATAVERSE.equals(dsv.getDataset().getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
+ || HarvestingClient.HARVEST_STYLE_VDC.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())
+ || HarvestingClient.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())
+ || HarvestingClient.HARVEST_STYLE_DATAVERSE.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())) {
if (!StringUtils.isEmpty(dsv.getDataset().getIdentifier())) {
- persistentId = new GlobalId(dsv.getDataset().getGlobalId());
+ // creating a global id like this:
+ // persistentId = new GlobalId(dsv.getDataset().getGlobalId());
+ // you end up doing new GlobalId((new GlobalId(dsv.getDataset())).toString())
+ // - doing an extra formatting-and-parsing-again
+ // This achieves the same thing:
+ persistentId = new GlobalId(dsv.getDataset());
}
}
@@ -80,9 +88,7 @@ public DataCitation(DatasetVersion dsv) {
distributors = dsv.getRootDataverseNameforCitation();
} else {
distributors = dsv.getDistributorName();
- if (!StringUtils.isEmpty(distributors)) {
- distributors += " [distributor]";
- }
+ //remove += [distributor] SEK 8-18-2016
}
// version
@@ -153,7 +159,9 @@ public String toString(boolean html) {
citationList.add(formatString(authors, html));
citationList.add(year);
citationList.add(formatString(title, html, "\""));
- citationList.add(formatURL(persistentId.toString(), persistentId.toURL().toString(), html));
+ if (persistentId != null) {
+ citationList.add(formatURL(persistentId.toString(), persistentId.toURL().toString(), html));
+ }
citationList.add(formatString(distributors, html));
citationList.add(version);
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 8afd52ac181..24c01e3b107 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -541,16 +541,8 @@ public void setFileAccessRequesters(List fileAccessRequesters
this.fileAccessRequesters = fileAccessRequesters;
}
-
public boolean isHarvested() {
- // TODO:
- // alternatively, we can determine whether this is a harvested file
- // by looking at the storage identifier of the physical file;
- // if it's something that's not a filesystem path (URL, etc.) -
- // then it's a harvested object.
- // -- L.A. 4.0
- // OK, here: (4.2.2)
// (storageIdentifier is not nullable - so no need to check for null
// pointers below):
if (this.getStorageIdentifier().startsWith("http://") || this.getStorageIdentifier().startsWith("https://")) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
index dbc4a3c4788..67b1fd1dd1c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
@@ -9,7 +9,6 @@
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
-import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 3ec5733fc67..c2ecce1543e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -10,6 +10,7 @@
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
import edu.harvard.iq.dataverse.search.SolrSearchResult;
import edu.harvard.iq.dataverse.search.SortBy;
import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
@@ -752,6 +753,12 @@ public void removeFileMetadata(FileMetadata fileMetadata) {
em.remove(mergedFM);
}
+ public List<DataFile> findHarvestedFilesByClient(HarvestingClient harvestingClient) {
+ TypedQuery<DataFile> query = em.createQuery("SELECT d FROM DataFile d, DvObject o, Dataset s WHERE o.id = d.id AND o.owner.id = s.id AND s.harvestedFrom.id = :harvestingClientId", DataFile.class);
+ query.setParameter("harvestingClientId", harvestingClient.getId());
+ return query.getResultList();
+ }
+
/**/
public void generateStorageIdentifier(DataFile dataFile) {
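The new findHarvestedFilesByClient finder returns every DataFile whose owning Dataset was harvested by the given client. A hypothetical caller, sketched only to illustrate the intended use; the HarvestedFileReport bean and countHarvestedFiles method are not part of the patch:

    import edu.harvard.iq.dataverse.DataFile;
    import edu.harvard.iq.dataverse.DataFileServiceBean;
    import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
    import java.util.List;
    import java.util.logging.Logger;
    import javax.ejb.EJB;
    import javax.ejb.Stateless;

    // Hypothetical caller: counts the files a harvesting client brought in,
    // e.g. before that client and its harvested content are deleted.
    @Stateless
    public class HarvestedFileReport {

        private static final Logger logger = Logger.getLogger(HarvestedFileReport.class.getCanonicalName());

        @EJB
        DataFileServiceBean dataFileService;

        public int countHarvestedFiles(HarvestingClient client) {
            List<DataFile> harvestedFiles = dataFileService.findHarvestedFilesByClient(client);
            logger.info("Harvesting client " + client.getId() + " accounts for " + harvestedFiles.size() + " files.");
            return harvestedFiles.size();
        }
    }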
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 5787580802f..0acf76dc00e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -1,10 +1,12 @@
package edu.harvard.iq.dataverse;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Objects;
@@ -53,6 +55,8 @@ public class Dataset extends DvObjectContainer {
@Temporal(value = TemporalType.TIMESTAMP)
private Date globalIdCreateTime;
+ @Temporal(value = TemporalType.TIMESTAMP)
+ private Date lastExportTime;
@NotBlank(message = "Please enter an identifier for your dataset.")
@Column(nullable = false)
@@ -148,6 +152,14 @@ public void setGlobalIdCreateTime(Date globalIdCreateTime) {
this.globalIdCreateTime = globalIdCreateTime;
}
+ public Date getLastExportTime() {
+ return lastExportTime;
+ }
+
+ public void setLastExportTime(Date lastExportTime) {
+ this.lastExportTime = lastExportTime;
+ }
+
public Guestbook getGuestbook() {
return guestbook;
}
@@ -241,6 +253,11 @@ private DatasetVersion createNewDatasetVersion(Template template) {
dsv.updateDefaultValuesFromTemplate(template);
} else {
latestVersion = getLatestVersionForCopy();
+
+ if (latestVersion.getUNF() != null){
+ dsv.setUNF(latestVersion.getUNF());
+ }
+
if (latestVersion.getDatasetFields() != null && !latestVersion.getDatasetFields().isEmpty()) {
dsv.setDatasetFields(dsv.copyDatasetFields(latestVersion.getDatasetFields()));
}
@@ -415,13 +432,15 @@ public DataFileCategory getCategoryByName(String categoryName) {
}
private Collection<String> getCategoryNames() {
- ArrayList<String> ret = new ArrayList<>();
if (dataFileCategories != null) {
- for (int i = 0; i < dataFileCategories.size(); i++) {
- ret.add(dataFileCategories.get(i).getName());
+ ArrayList<String> ret = new ArrayList<>(dataFileCategories.size());
+ for ( DataFileCategory dfc : dataFileCategories ) {
+ ret.add( dfc.getName() );
}
+ return ret;
+ } else {
+ return new ArrayList<>();
}
- return ret;
}
public Path getFileSystemDirectory() {
@@ -497,7 +516,10 @@ public String getCitation(boolean isOnlineVersion, DatasetVersion version) {
}
public String getPublicationDateFormattedYYYYMMDD() {
- return new SimpleDateFormat("yyyy-MM-dd").format(getPublicationDate());
+ if (getPublicationDate() != null){
+ return new SimpleDateFormat("yyyy-MM-dd").format(getPublicationDate());
+ }
+ return null;
}
public DataFile getThumbnailFile() {
@@ -508,39 +530,53 @@ public void setThumbnailFile(DataFile thumbnailFile) {
this.thumbnailFile = thumbnailFile;
}
+ @ManyToOne
+ @JoinColumn(name="harvestingClient_id")
+ private HarvestingClient harvestedFrom;
+
+ public HarvestingClient getHarvestedFrom() {
+ return this.harvestedFrom;
+ }
+
+ public void setHarvestedFrom(HarvestingClient harvestingClientConfig) {
+ this.harvestedFrom = harvestingClientConfig;
+ }
+
+
public boolean isHarvested() {
- // TODO:
- // eventually, this will be more complex:
- // A dataverse owner will be able to harvest some datasets into
- // any dataverse, in addition to any local datasets there.
- // -- L.A. 4.0
- Dataverse ownerDataverse = this.getOwner();
- if (ownerDataverse != null) {
- return ownerDataverse.isHarvested();
- }
- return false;
+ return this.harvestedFrom != null;
+ }
+
+ private String harvestIdentifier;
+
+ public String getHarvestIdentifier() {
+ return harvestIdentifier;
+ }
+
+ public void setHarvestIdentifier(String harvestIdentifier) {
+ this.harvestIdentifier = harvestIdentifier;
}
public String getRemoteArchiveURL() {
if (isHarvested()) {
- if (HarvestingDataverseConfig.HARVEST_STYLE_DATAVERSE.equals(this.getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
- return this.getOwner().getHarvestingDataverseConfig().getArchiveUrl() + "/dataset.xhtml?persistentId=" + getGlobalId();
- } else if (HarvestingDataverseConfig.HARVEST_STYLE_VDC.equals(this.getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
- String rootArchiveUrl = this.getOwner().getHarvestingDataverseConfig().getHarvestingUrl();
+ if (HarvestingClient.HARVEST_STYLE_DATAVERSE.equals(this.getHarvestedFrom().getHarvestStyle())) {
+ return this.getHarvestedFrom().getArchiveUrl() + "/dataset.xhtml?persistentId=" + getGlobalId();
+ } else if (HarvestingClient.HARVEST_STYLE_VDC.equals(this.getHarvestedFrom().getHarvestStyle())) {
+ String rootArchiveUrl = this.getHarvestedFrom().getHarvestingUrl();
int c = rootArchiveUrl.indexOf("/OAIHandler");
if (c > 0) {
rootArchiveUrl = rootArchiveUrl.substring(0, c);
return rootArchiveUrl + "/faces/study/StudyPage.xhtml?globalId=" + getGlobalId();
}
- } else if (HarvestingDataverseConfig.HARVEST_STYLE_ICPSR.equals(this.getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
+ } else if (HarvestingClient.HARVEST_STYLE_ICPSR.equals(this.getHarvestedFrom().getHarvestStyle())) {
// For the ICPSR, it turns out that the best thing to do is to
// rely on the DOI to send the user to the right landing page for
// the study:
//String icpsrId = identifier;
- //return this.getOwner().getHarvestingDataverseConfig().getArchiveUrl() + "/icpsrweb/ICPSR/studies/"+icpsrId+"?q="+icpsrId+"&searchSource=icpsr-landing";
+ //return this.getOwner().getHarvestingClient().getArchiveUrl() + "/icpsrweb/ICPSR/studies/"+icpsrId+"?q="+icpsrId+"&searchSource=icpsr-landing";
return "http://doi.org/" + authority + "/" + identifier;
- } else if (HarvestingDataverseConfig.HARVEST_STYLE_NESSTAR.equals(this.getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
- String nServerURL = this.getOwner().getHarvestingDataverseConfig().getArchiveUrl();
+ } else if (HarvestingClient.HARVEST_STYLE_NESSTAR.equals(this.getHarvestedFrom().getHarvestStyle())) {
+ String nServerURL = this.getHarvestedFrom().getArchiveUrl();
// chop any trailing slashes in the server URL - or they will result
// in multiple slashes in the final URL pointing to the study
// on server of origin; Nesstar doesn't like it, apparently.
@@ -558,9 +594,9 @@ public String getRemoteArchiveURL() {
+ "&top=yes";
return NesstarWebviewPage;
- } else if (HarvestingDataverseConfig.HARVEST_STYLE_ROPER.equals(this.getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
- return this.getOwner().getHarvestingDataverseConfig().getArchiveUrl() + "/CFIDE/cf/action/catalog/abstract.cfm?archno=" + identifier;
- } else if (HarvestingDataverseConfig.HARVEST_STYLE_HGL.equals(this.getOwner().getHarvestingDataverseConfig().getHarvestStyle())) {
+ } else if (HarvestingClient.HARVEST_STYLE_ROPER.equals(this.getHarvestedFrom().getHarvestStyle())) {
+ return this.getHarvestedFrom().getArchiveUrl() + "/CFIDE/cf/action/catalog/abstract.cfm?archno=" + identifier;
+ } else if (HarvestingClient.HARVEST_STYLE_HGL.equals(this.getHarvestedFrom().getHarvestStyle())) {
// a bit of a hack, true.
// HGL documents, when turned into Dataverse studies/datasets
// all 1 datafile; the location ("storage identifier") of the file
@@ -576,9 +612,9 @@ public String getRemoteArchiveURL() {
}
}
}
- return this.getOwner().getHarvestingDataverseConfig().getArchiveUrl();
+ return this.getHarvestedFrom().getArchiveUrl();
}else {
- return this.getOwner().getHarvestingDataverseConfig().getArchiveUrl();
+ return this.getHarvestedFrom().getArchiveUrl();
}
}
@@ -587,7 +623,7 @@ public String getRemoteArchiveURL() {
public String getHarvestingDescription() {
if (isHarvested()) {
- return this.getOwner().getHarvestingDataverseConfig().getArchiveDescription();
+ return this.getHarvestedFrom().getArchiveDescription();
}
return null;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
index f5275873b4c..69de63c5fa6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
@@ -48,7 +48,8 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String title = "title";
- public final static String subTitle="subTitle";
+ public final static String subTitle="subtitle"; //SEK 6-7-2016 to match what is in DB
+ public final static String alternativeTitle="alternativeTitle"; //missing from class
public final static String datasetId = "datasetId";
public final static String authorName ="authorName";
public final static String authorAffiliation = "authorAffiliation";
@@ -59,7 +60,7 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String producerName="producerName";
public final static String producerURL="producerURL";
- public final static String producerLogo="producerLogo";
+ public final static String producerLogo="producerLogoURL";
public final static String producerAffiliation="producerAffiliation";
public final static String producerAbbreviation= "producerAbbreviation";
public final static String productionDate="productionDate";
@@ -71,14 +72,18 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String grantNumberAgency="grantNumberAgency";
public final static String distributorName="distributorName";
public final static String distributorURL="distributorURL";
- public final static String distributorLogo="distributorLogo";
+ public final static String distributorLogo="distributorLogoURL";
public final static String distributionDate="distributionDate";
public final static String distributorContactName="distributorContactName";
public final static String distributorContactAffiliation="distributorContactAffiliation";
public final static String distributorContactEmail="distributorContactEmail";
public final static String distributorAffiliation="distributorAffiliation";
-
public final static String distributorAbbreviation="distributorAbbreviation";
+
+ public final static String contributor="contributor"; //SEK added for Dublin Core 6/22
+ public final static String contributorType="contributorType";
+ public final static String contributorName="contributorName";
+
public final static String depositor="depositor";
public final static String dateOfDeposit="dateOfDeposit";
public final static String seriesName="seriesName";
@@ -86,32 +91,40 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String datasetVersionValue="datasetVersionValue";
public final static String versionDate="versionDate";
public final static String keywordValue="keywordValue";
- public final static String keywordVocab="keywordVocab";
- public final static String keywordVocabURI="keywordVocabURI";
+ public final static String keywordVocab="keywordVocabulary"; //SEK 6/10/2016 to match what is in the db
+ public final static String keywordVocabURI="keywordVocabularyURI"; //SEK 6/10/2016 to match what is in the db
public final static String topicClassValue="topicClassValue";
public final static String topicClassVocab="topicClassVocab";
public final static String topicClassVocabURI="topicClassVocabURI";
public final static String descriptionText="dsDescriptionValue";
public final static String descriptionDate="descriptionDate";
+ public final static String timePeriodCovered="timePeriodCovered"; // SEK added 6/13/2016
public final static String timePeriodCoveredStart="timePeriodCoveredStart";
public final static String timePeriodCoveredEnd="timePeriodCoveredEnd";
+ public final static String dateOfCollection="dateOfCollection"; // SEK added 6/13/2016
public final static String dateOfCollectionStart="dateOfCollectionStart";
public final static String dateOfCollectionEnd="dateOfCollectionEnd";
public final static String country="country";
public final static String geographicCoverage="geographicCoverage";
public final static String otherGeographicCoverage="otherGeographicCoverage";
+ public final static String city="city"; // SEK added 6/13/2016
+ public final static String state="state"; // SEK added 6/13/2016
public final static String geographicUnit="geographicUnit";
public final static String westLongitude="westLongitude";
public final static String eastLongitude="eastLongitude";
- public final static String northLatitude="northLatitude";
- public final static String southLatitude="southLatitude";
+ public final static String northLatitude="northLongitude"; //Changed to match DB - incorrectly entered into DB
+ public final static String southLatitude="southLongitude"; //Incorrect in DB
public final static String unitOfAnalysis="unitOfAnalysis";
public final static String universe="universe";
public final static String kindOfData="kindOfData";
public final static String timeMethod="timeMethod";
public final static String dataCollector="dataCollector";
+ public final static String collectorTraining="collectorTraining";
public final static String frequencyOfDataCollection="frequencyOfDataCollection";
public final static String samplingProcedure="samplingProcedure";
+ public final static String targetSampleSize = "targetSampleSize";
+ public final static String targetSampleActualSize = "targetSampleActualSize";
+ public final static String targetSampleSizeFormula = "targetSampleSizeFormula";
public final static String deviationsFromSampleDesign="deviationsFromSampleDesign";
public final static String collectionMode="collectionMode";
public final static String researchInstrument="researchInstrument";
@@ -127,6 +140,12 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String datasetLevelErrorNotes="datasetLevelErrorNotes";
public final static String responseRate="responseRate";
public final static String samplingErrorEstimates="samplingErrorEstimates";
+
+ public final static String socialScienceNotes = "socialScienceNotes";
+ public final static String socialScienceNotesType = "socialScienceNotesType";
+ public final static String socialScienceNotesSubject = "socialScienceNotesSubject";
+ public final static String socialScienceNotesText = "socialScienceNotesText";
+
public final static String otherDataAppraisal="otherDataAppraisal";
public final static String placeOfAccess="placeOfAccess";
public final static String originalArchive="originalArchive";
@@ -148,6 +167,7 @@ public class DatasetFieldConstant implements java.io.Serializable {
public final static String relatedDatasets="relatedDatasets";
public final static String otherReferences="otherReferences";
public final static String notesText="notesText";
+ public final static String language="language";
public final static String noteInformationType="noteInformationType";
public final static String notesInformationSubject="notesInformationSubject";
public final static String subject="subject";
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index e4c44d36b5c..882608dff45 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -6,21 +6,32 @@
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.authorization.users.GuestUser;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetPrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
+import edu.harvard.iq.dataverse.export.ExportException;
+import edu.harvard.iq.dataverse.export.ExportService;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
import edu.harvard.iq.dataverse.ingest.IngestRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
import edu.harvard.iq.dataverse.search.SearchFilesServiceBean;
import edu.harvard.iq.dataverse.search.SortBy;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -143,6 +154,10 @@ public enum DisplayMode {
DatasetLinkingServiceBean dsLinkingService;
@EJB
SearchFilesServiceBean searchFilesService;
+ @EJB
+ DataverseRoleServiceBean dataverseRoleService;
+ @EJB
+ PrivateUrlServiceBean privateUrlService;
@Inject
DataverseRequestServiceBean dvRequestService;
@Inject
@@ -487,10 +502,19 @@ public boolean canDownloadFile(FileMetadata fileMetadata){
// --------------------------------------------------------------------
// --------------------------------------------------------------------
- // (2) Is user authenticated?
- // No? Then no button...
+ // (2) In Dataverse 4.3 and earlier we required that users be authenticated
+ // to download files, but in developing the Private URL feature, we have
+ // added a new subclass of "User" called "PrivateUrlUser" that returns false
+ // for isAuthenticated but that should be able to download restricted files
+ // when given the Member role (which includes the DownloadFile permission).
+ // This is consistent with how Builtin and Shib users (both are
+ // AuthenticatedUsers) can download restricted files when they are granted
+ // the Member role. For this reason condition 2 has been changed. Previously,
+ // we required isSessionUserAuthenticated to return true. Now we require
+ // that the User is not an instance of GuestUser, which is similar in
+ // spirit to the previous check.
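+        // (In other words, authenticated users and PrivateUrlUsers proceed to the
+        // permission checks below; only anonymous GuestUsers are turned away here.)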
// --------------------------------------------------------------------
- if (!(isSessionUserAuthenticated())){
+ if (session.getUser() instanceof GuestUser){
this.fileDownloadPermissionMap.put(fid, false);
return false;
}
@@ -1370,7 +1394,16 @@ public void setDisplayFileMetadata(List displayFileMetadata) {
private boolean readOnly = true;
private boolean metadataExportEnabled;
+ private String originalSourceUrl = null;
+ public String getOriginalSourceUrl() {
+ return originalSourceUrl;
+ }
+
+ public void setOriginalSourceUrl(String originalSourceUrl) {
+ this.originalSourceUrl = originalSourceUrl;
+ }
+
public String init() {
return init(true);
}
@@ -1559,6 +1592,20 @@ private String init(boolean initFull) {
return permissionsWrapper.notFound();
}
+ try {
+ privateUrl = commandEngine.submit(new GetPrivateUrlCommand(dvRequestService.getDataverseRequest(), dataset));
+ if (privateUrl != null) {
+ JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.privateurl.infoMessageAuthor", Arrays.asList(getPrivateUrlLink(privateUrl))));
+ }
+ } catch (CommandException ex) {
+ // No big deal. The user simply doesn't have access to create or delete a Private URL.
+ }
+ if (session.getUser() instanceof PrivateUrlUser) {
+ PrivateUrlUser privateUrlUser = (PrivateUrlUser) session.getUser();
+ if (dataset != null && dataset.getId().equals(privateUrlUser.getDatasetId())) {
+ JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.privateurl.infoMessageReviewer"));
+ }
+ }
return null;
}
@@ -2643,6 +2690,12 @@ public void deleteFiles() {
successMessage = successMessage.replace("{0}", fileNames);
JsfHelper.addFlashMessage(successMessage);
}
+
+ /*
+ Do note that if we are deleting any files that have UNFs (i.e.,
+ tabular files), we DO NEED TO RECALCULATE the UNF of the version!
+ - but we will do this inside the UpdateDatasetCommand.
+ */
}
public String save() {
@@ -3404,7 +3457,7 @@ public String getVariableMetadataURL(Long fileid) {
}
public String getTabularDataFileURL(Long fileid) {
- String myHostURL = getDataverseSiteUrl();;
+ String myHostURL = getDataverseSiteUrl();
String dataURL = myHostURL + "/api/access/datafile/" + fileid;
return dataURL;
@@ -3421,6 +3474,33 @@ public String getMetadataAsJsonUrl() {
}
return null;
}
+
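+    // Builds {display name, export URL} pairs for the metadata export menu; only
+    // exporters flagged as available to end users are included.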
+    public List<String[]> getExporters() {
+        List<String[]> retList = new ArrayList<>();
+ String myHostURL = getDataverseSiteUrl();
+ for (String [] provider : ExportService.getInstance().getExportersLabels() ){
+ String formatName = provider[1];
+ String formatDisplayName = provider[0];
+
+ Exporter exporter = null;
+ try {
+ exporter = ExportService.getInstance().getExporter(formatName);
+ } catch (ExportException ex) {
+ exporter = null;
+ }
+ if (exporter != null && exporter.isAvailableToUsers()) {
+ // Not all metadata exports should be presented to the web users!
+ // Some are only for harvesting clients.
+
+ String[] temp = new String[2];
+ temp[0] = formatDisplayName;
+ temp[1] = myHostURL + "/api/datasets/export?exporter=" + formatName + "&persistentId=" + dataset.getGlobalId();
+ retList.add(temp);
+ }
+ }
+ return retList;
+ }
+
private FileMetadata fileMetadataSelected = null;
@@ -4281,4 +4361,61 @@ public String getSortByDescending() {
return SortBy.DESCENDING;
}
+ PrivateUrl privateUrl;
+
+ public PrivateUrl getPrivateUrl() {
+ return privateUrl;
+ }
+
+ public void setPrivateUrl(PrivateUrl privateUrl) {
+ this.privateUrl = privateUrl;
+ }
+
+ public void initPrivateUrlPopUp() {
+ if (privateUrl != null) {
+ setPrivateUrlJustCreatedToFalse();
+ }
+ }
+
+ boolean privateUrlWasJustCreated;
+
+ public boolean isPrivateUrlWasJustCreated() {
+ return privateUrlWasJustCreated;
+ }
+
+ public void setPrivateUrlJustCreatedToFalse() {
+ privateUrlWasJustCreated = false;
+ }
+
+ public void createPrivateUrl() {
+ try {
+ PrivateUrl createdPrivateUrl = commandEngine.submit(new CreatePrivateUrlCommand(dvRequestService.getDataverseRequest(), dataset));
+ privateUrl = createdPrivateUrl;
+ JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.privateurl.infoMessageAuthor", Arrays.asList(getPrivateUrlLink(privateUrl))));
+ privateUrlWasJustCreated = true;
+ } catch (CommandException ex) {
+ String msg = BundleUtil.getStringFromBundle("dataset.privateurl.noPermToCreate", PrivateUrlUtil.getRequiredPermissions(ex));
+ logger.info("Unable to create a Private URL for dataset id " + dataset.getId() + ". Message to user: " + msg + " Exception: " + ex);
+ JH.addErrorMessage(msg);
+ }
+ }
+
+ public void disablePrivateUrl() {
+ try {
+ commandEngine.submit(new DeletePrivateUrlCommand(dvRequestService.getDataverseRequest(), dataset));
+ privateUrl = null;
+ JH.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.privateurl.disabledSuccess"));
+ } catch (CommandException ex) {
+ logger.info("CommandException caught calling DeletePrivateUrlCommand: " + ex);
+ }
+ }
+
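+    // A Private URL can only be created while the latest version of the dataset is still a draft: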
+ public boolean isUserCanCreatePrivateURL() {
+ return dataset.getLatestVersion().isDraft();
+ }
+
+ public String getPrivateUrlLink(PrivateUrl privateUrl) {
+ return privateUrl.getLink();
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index 7ff6faee598..be0d5dd5c66 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -10,9 +10,13 @@
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
+import edu.harvard.iq.dataverse.export.ExportService;
+import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -21,8 +25,10 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
+import javax.ejb.Asynchronous;
import javax.ejb.EJB;
import javax.ejb.EJBException;
import javax.ejb.Stateless;
@@ -68,7 +74,12 @@ public class DatasetServiceBean implements java.io.Serializable {
@EJB
PermissionServiceBean permissionService;
-
+
+ @EJB
+ OAIRecordServiceBean recordService;
+
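+    // (Note: SimpleDateFormat is not thread-safe; this shared instance is only used
+    // to time-stamp the names of the export log files created below.)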
+ private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss");
+
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
@@ -103,6 +114,11 @@ private List findByOwnerId(Long ownerId, boolean onlyPublished) {
public List findAll() {
return em.createQuery("select object(o) from Dataset as o order by o.id").getResultList();
}
+
+
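+    // Returns the ids of all non-harvested ("local") datasets; the export-all jobs
+    // below iterate over ids so that only one Dataset needs to be instantiated at a time.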
+    public List<Long> findAllLocalDatasetIds() {
+ return em.createQuery("SELECT o.id FROM Dataset o WHERE o.harvestedFrom IS null ORDER BY o.id", Long.class).getResultList();
+ }
/**
* For docs, see the equivalent method on the DataverseServiceBean.
@@ -190,7 +206,9 @@ public Dataset findByGlobalId(String globalId) {
query.setParameter("authority", authority);
foundDataset = (Dataset) query.getSingleResult();
} catch (javax.persistence.NoResultException e) {
- logger.info("no ds found: " + globalId);
+            // (logged at .fine rather than .info; at .info this can fill the log
+            // file with thousands of these messages during a large harvest run)
+ logger.fine("no ds found: " + globalId);
// DO nothing, just return null.
}
return foundDataset;
@@ -226,6 +244,12 @@ public boolean isUniqueIdentifier(String userIdentifier, String protocol, String
return u;
}
+ public DatasetVersion storeVersion( DatasetVersion dsv ) {
+ em.persist(dsv);
+ return dsv;
+ }
+
+
public String createCitationRIS(DatasetVersion version) {
return createCitationRIS(version, null);
}
@@ -532,6 +556,21 @@ public String getTitleFromLatestVersion(Long datasetId, boolean includeDraft){
}
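+    // Finds a harvested dataset by its harvest identifier within the given dataverse;
+    // returns null if there is no match, and throws if there is more than one.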
+ public Dataset getDatasetByHarvestInfo(Dataverse dataverse, String harvestIdentifier) {
+ String queryStr = "SELECT d FROM Dataset d, DvObject o WHERE d.id = o.id AND o.owner.id = " + dataverse.getId() + " and d.harvestIdentifier = '" + harvestIdentifier + "'";
+ Query query = em.createQuery(queryStr);
+ List resultList = query.getResultList();
+ Dataset dataset = null;
+ if (resultList.size() > 1) {
+ throw new EJBException("More than one dataset found in the dataverse (id= " + dataverse.getId() + "), with harvestIdentifier= " + harvestIdentifier);
+ }
+ if (resultList.size() == 1) {
+ dataset = (Dataset) resultList.get(0);
+ }
+ return dataset;
+
+ }
+
public Long getDatasetVersionCardImage(Long versionId, User user) {
if (versionId == null) {
return null;
@@ -547,14 +586,14 @@ public Long getDatasetVersionCardImage(Long versionId, User user) {
*
* @return
*/
- public Map getHarvestingDescriptionsForHarvestedDatasets(Set datasetIds){
+ public Map getArchiveDescriptionsForHarvestedDatasets(Set datasetIds){
if (datasetIds == null || datasetIds.size() < 1) {
return null;
}
String datasetIdStr = Strings.join(datasetIds, ", ");
- String qstr = "SELECT d.id, h.archiveDescription FROM harvestingDataverseConfig h, dataset d, dvobject o WHERE d.id = o.id AND h.dataverse_id = o.owner_id AND d.id IN (" + datasetIdStr + ")";
+ String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")";
List searchResults = null;
try {
@@ -626,4 +665,107 @@ public boolean isDatasetCardImageAvailable(DatasetVersion datasetVersion, User u
return false;
}
+
+
+ // reExportAll *forces* a reexport on all published datasets; whether they
+ // have the "last export" time stamp set or not.
+ @Asynchronous
+ public void reExportAllAsync() {
+ exportAllDatasets(true);
+ }
+
+ public void reExportAll() {
+ exportAllDatasets(true);
+ }
+
+
+ // exportAll() will try to export the yet unexported datasets (it will honor
+ // and trust the "last export" time stamp).
+
+ @Asynchronous
+ public void exportAllAsync() {
+ exportAllDatasets(false);
+ }
+
+ public void exportAll() {
+ exportAllDatasets(false);
+ }
+
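+    // (The @Asynchronous variants return immediately and run the export in a
+    // container-managed background thread; the plain variants block the caller.)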
+ public void exportAllDatasets(boolean forceReExport) {
+ Integer countAll = 0;
+ Integer countSuccess = 0;
+ Integer countError = 0;
+ String logTimestamp = logFormatter.format(new Date());
+ Logger exportLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.DatasetServiceBean." + "ExportAll" + logTimestamp);
+ String logFileName = "../logs" + File.separator + "export_" + logTimestamp + ".log";
+ FileHandler fileHandler = null;
+ boolean fileHandlerSuceeded = false;
+ try {
+ fileHandler = new FileHandler(logFileName);
+ exportLogger.setUseParentHandlers(false);
+ fileHandlerSuceeded = true;
+ } catch (IOException ex) {
+ Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.SEVERE, null, ex);
+ } catch (SecurityException ex) {
+ Logger.getLogger(DatasetServiceBean.class.getName()).log(Level.SEVERE, null, ex);
+ }
+
+ if (fileHandlerSuceeded) {
+ exportLogger.addHandler(fileHandler);
+ } else {
+ exportLogger = null;
+ exportLogger = logger;
+ }
+
+ exportLogger.info("Starting an export all job");
+
+ for (Long datasetId : findAllLocalDatasetIds()) {
+ // Potentially, there's a godzillion datasets in this Dataverse.
+ // This is why we go through the list of ids here, and instantiate
+ // only one dataset at a time.
+ Dataset dataset = this.find(datasetId);
+ if (dataset != null) {
+                // Is this an accurate "is published?" test? Yes: we can't trust
+                // dataset.isReleased() alone, because it is a DvObject method that simply
+                // checks (publicationDate != null), and "publicationDate" is essentially the
+                // first publication date, which stays the same as versions get published
+                // and/or deaccessioned. Combined with !isDeaccessioned(), however, the test
+                // below is accurate.
+ if (dataset.isReleased() && dataset.getReleasedVersion() != null && !dataset.isDeaccessioned()) {
+
+                    // (dataset.getPublicationDate() can't be trusted here, for the reasons above)
+                    Date publicationDate = dataset.getReleasedVersion().getReleaseTime(); // the test above guarantees a non-null released version
+ if (forceReExport || (publicationDate != null
+ && (dataset.getLastExportTime() == null
+ || dataset.getLastExportTime().before(publicationDate)))) {
+ countAll++;
+ try {
+ recordService.exportAllFormatsInNewTransaction(dataset);
+ exportLogger.info("Success exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId());
+ countSuccess++;
+ } catch (Exception ex) {
+ exportLogger.info("Error exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId() + "; " + ex.getMessage());
+ countError++;
+ }
+ }
+ }
+ dataset = null;
+ }
+ }
+ exportLogger.info("Datasets processed: " + countAll.toString());
+ exportLogger.info("Datasets exported successfully: " + countSuccess.toString());
+        exportLogger.info("Dataset export failures: " + countError.toString());
+ exportLogger.info("Finished export-all job.");
+
+ if (fileHandlerSuceeded) {
+ fileHandler.close();
+ }
+
+ }
+
+ public void updateLastExportTimeStamp(Long datasetId) {
+ Date now = new Date();
+ em.createNativeQuery("UPDATE Dataset SET lastExportTime='"+now.toString()+"' WHERE id="+datasetId).executeUpdate();
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index 469375828c9..3be005a252d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -785,7 +785,7 @@ public String getDistributionDate() {
}
public String getDistributorName() {
- for (DatasetField dsf : this.getDatasetFields()) {
+ for (DatasetField dsf : this.getFlatDatasetFields()) {
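+            // (getFlatDatasetFields() includes the children of compound fields;
+            // distributorName is a child of the compound "distributor" field)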
if (DatasetFieldConstant.distributorName.equals(dsf.getDatasetFieldType().getName())) {
return dsf.getValue();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index 644211c9c37..cb04cfb0d9a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -1,8 +1,10 @@
package edu.harvard.iq.dataverse;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch;
import java.util.ArrayList;
+import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
@@ -39,7 +41,9 @@
*/
@NamedQueries({
@NamedQuery(name = "Dataverse.ownedObjectsById", query = "SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"),
- @NamedQuery(name = "Dataverse.findByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias)=:alias")
+ @NamedQuery(name = "Dataverse.findByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias)=:alias"),
+ @NamedQuery(name = "Dataverse.filterByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias) LIKE :alias order by dv.alias"),
+ @NamedQuery(name = "Dataverse.filterByAliasNameAffiliation", query="SELECT dv FROM Dataverse dv WHERE (LOWER(dv.alias) LIKE :alias) OR (LOWER(dv.name) LIKE :name) OR (LOWER(dv.affiliation) LIKE :affiliation) order by dv.alias")
})
@Entity
@Table(indexes = {@Index(columnList="fk_dataverse_id")
@@ -287,20 +291,22 @@ public void setGuestbooks(List guestbooks) {
this.guestbooks = guestbooks;
}
- @OneToOne (mappedBy="dataverse", cascade={CascadeType.PERSIST, CascadeType.REMOVE})
- private HarvestingDataverseConfig harvestingDataverseConfig;
+
+ @OneToMany (mappedBy="dataverse", cascade={CascadeType.MERGE, CascadeType.REMOVE})
+ private List harvestingClientConfigs;
- public HarvestingDataverseConfig getHarvestingDataverseConfig() {
- return this.harvestingDataverseConfig;
+ public List getHarvestingClientConfigs() {
+ return this.harvestingClientConfigs;
}
- public void setHarvestingDataverseConfig(HarvestingDataverseConfig harvestingDataverseConfig) {
- this.harvestingDataverseConfig = harvestingDataverseConfig;
+ public void setHarvestingClientConfigs(List harvestingClientConfigs) {
+ this.harvestingClientConfigs = harvestingClientConfigs;
}
-
+ /*
public boolean isHarvested() {
- return harvestingDataverseConfig != null;
+ return harvestingClient != null;
}
+ */
public List getParentGuestbooks() {
@@ -661,13 +667,24 @@ public void setFacetRoot(boolean facetRoot) {
public void addRole(DataverseRole role) {
role.setOwner(this);
+ if ( roles == null ) {
+ roles = new HashSet<>();
+ }
roles.add(role);
}
-
+
+ /**
+ * Note: to add a role, use {@link #addRole(edu.harvard.iq.dataverse.authorization.DataverseRole)},
+ * do not call this method and try to add directly to the list.
+ * @return the roles defined in this Dataverse.
+ */
public Set getRoles() {
+ if ( roles == null ) {
+ roles = new HashSet<>();
+ }
return roles;
}
-
+
public List getOwners() {
List owners = new ArrayList();
if (getOwner() != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
index 701b9bec059..685ec8132b3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
@@ -26,6 +26,7 @@ public class DataverseConverter implements Converter {
@Override
public Object getAsObject(FacesContext facesContext, UIComponent component, String submittedValue) {
return dataverseService.find(new Long(submittedValue));
+ //return dataverseService.findByAlias(submittedValue);
}
@Override
@@ -34,6 +35,7 @@ public String getAsString(FacesContext facesContext, UIComponent component, Obje
return "";
} else {
return ((Dataverse) value).getId().toString();
+ //return ((Dataverse) value).getAlias();
}
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
index 019bc429374..7e50c9a4148 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
@@ -2,6 +2,7 @@
import edu.harvard.iq.dataverse.util.LruCache;
import java.util.List;
+import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.inject.Named;
import javax.persistence.EntityManager;
@@ -22,6 +23,9 @@ public class DataverseFacetServiceBean implements java.io.Serializable {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
+ @EJB
+ DataverseServiceBean dataverses;
+
public List findByDataverseId(Long dataverseId) {
List res = cache.get(dataverseId);
@@ -48,19 +52,22 @@ public void deleteFacetsFor( Dataverse d ) {
}
- public void create(int diplayOrder, Long datasetFieldId, Long dataverseId) {
+ public DataverseFacet create(int displayOrder, DatasetFieldType fieldType, Dataverse ownerDv) {
DataverseFacet dataverseFacet = new DataverseFacet();
- dataverseFacet.setDisplayOrder(diplayOrder);
-
- DatasetFieldType dsfType = (DatasetFieldType)em.find(DatasetFieldType.class,datasetFieldId);
- dataverseFacet.setDatasetFieldType(dsfType);
-
- Dataverse dataverse = (Dataverse)em.find(Dataverse.class,dataverseId);
- dataverseFacet.setDataverse(dataverse);
+ dataverseFacet.setDisplayOrder(displayOrder);
+ dataverseFacet.setDatasetFieldType(fieldType);
+ dataverseFacet.setDataverse(ownerDv);
- dataverse.getDataverseFacets().add(dataverseFacet);
+ ownerDv.getDataverseFacets().add(dataverseFacet);
em.persist(dataverseFacet);
+ return dataverseFacet;
+ }
+
+ public DataverseFacet create(int displayOrder, Long datasetFieldTypeId, Long dataverseId) {
+ return create( displayOrder,
+ (DatasetFieldType)em.find(DatasetFieldType.class,datasetFieldTypeId),
+ dataverses.find(dataverseId) );
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
index b5f5d78f716..74d992be9fa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
@@ -411,13 +411,30 @@ public List findDataversesThatLinkToThisDatasetId(long datasetId) {
return datasetLinkingService.findLinkingDataverses(datasetId);
}
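+    // Backs the dataverse auto-complete on the Harvesting Clients page: matches the
+    // query as a prefix of the alias, or as a substring of the name or affiliation.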
+ public List filterByAliasQuery(String filterQuery) {
+ //Query query = em.createNativeQuery("select o from Dataverse o where o.alias LIKE '" + filterQuery + "%' order by o.alias");
+ //Query query = em.createNamedQuery("Dataverse.filterByAlias", Dataverse.class).setParameter("alias", filterQuery.toLowerCase() + "%");
+ Query query = em.createNamedQuery("Dataverse.filterByAliasNameAffiliation", Dataverse.class)
+ .setParameter("alias", filterQuery.toLowerCase() + "%")
+ .setParameter("name", "%" + filterQuery.toLowerCase() + "%")
+ .setParameter("affiliation", "%" + filterQuery.toLowerCase() + "%");
+ //logger.info("created native query: select o from Dataverse o where o.alias LIKE '" + filterQuery + "%' order by o.alias");
+ logger.info("created named query");
+ List ret = query.getResultList();
+ if (ret != null) {
+ logger.info("results list: "+ret.size()+" results.");
+ }
+ return ret;
+ }
+
/**
* Used to identify and properly display Harvested objects on the dataverse page.
*
- */
+ *//*
+ @Deprecated
public Map getAllHarvestedDataverseDescriptions(){
- String qstr = "SELECT dataverse_id, archiveDescription FROM harvestingDataverseConfig;";
+ String qstr = "SELECT dataverse_id, archiveDescription FROM harvestingClient;";
List searchResults = null;
try {
@@ -449,7 +466,7 @@ public Map getAllHarvestedDataverseDescriptions(){
}
return ret;
- }
+ }*/
public void populateDvSearchCard(SolrSearchResult solrSearchResult) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
index edec5fbe008..3770c54750a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
@@ -39,7 +39,7 @@ public User getUser() {
return user;
}
- public void setUser(AuthenticatedUser aUser) {
+ public void setUser(User aUser) {
logSvc.log(
new ActionLogRecord(ActionLogRecord.ActionType.SessionManagement,(aUser==null) ? "logout" : "login")
.setUserIdentifier((aUser!=null) ? aUser.getIdentifier() : (user!=null ? user.getIdentifier() : "") ));
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
index 5c59452cbed..2e58945a672 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
@@ -174,7 +174,7 @@ public List getDvObjectInfoByParentIdForMyData(List dvObjectPare
*/
public List getAllHarvestedDataverseIds(){
- String qstr = "SELECT h.dataverse_id FROM harvestingdataverseconfig h;";
+ String qstr = "SELECT h.dataverse_id FROM harvestingclient h;";
return em.createNativeQuery(qstr)
.getResultList();
diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index baac2a13037..593408543ab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -12,6 +12,8 @@
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.search.SearchServiceBean;
import java.util.Map;
@@ -23,6 +25,7 @@
import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.SystemConfig;
import java.util.EnumSet;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -67,6 +70,9 @@ public class EjbDataverseEngine {
@EJB
SearchServiceBean searchService;
+
+ @EJB
+ IngestServiceBean ingestService;
@EJB
PermissionServiceBean permissionService;
@@ -127,7 +133,13 @@ public class EjbDataverseEngine {
@EJB
AuthenticationServiceBean authentication;
-
+
+ @EJB
+ SystemConfig systemConfig;
+
+ @EJB
+ PrivateUrlServiceBean privateUrlService;
+
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
@@ -172,6 +184,11 @@ public R submit(Command aCommand) throws CommandException {
if (!granted.containsAll(required)) {
required.removeAll(granted);
logRec.setActionResult(ActionLogRecord.Result.PermissionError);
+ /**
+ * @todo Is there any harm in showing the "granted" set
+ * since we already show "required"? It would help people
+ * reason about the mismatch.
+ */
throw new PermissionException("Can't execute command " + aCommand
+ ", because request " + aCommand.getRequest()
+ " is missing permissions " + required
@@ -258,6 +275,11 @@ public SearchServiceBean search() {
return searchService;
}
+ @Override
+ public IngestServiceBean ingest() {
+ return ingestService;
+ }
+
@Override
public PermissionServiceBean permissions() {
return permissionService;
@@ -371,7 +393,17 @@ public UserNotificationServiceBean notifications() {
public AuthenticationServiceBean authentication() {
return authentication;
}
-
+
+ @Override
+ public SystemConfig systemConfig() {
+ return systemConfig;
+ }
+
+ @Override
+ public PrivateUrlServiceBean privateUrl() {
+ return privateUrlService;
+ }
+
};
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
index 6c160ec15d0..45f527468fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
@@ -26,6 +26,10 @@ public class GlobalId implements java.io.Serializable {
@EJB
SettingsServiceBean settingsService;
+ // I'm marking this constructor as "deprecated" because it's not reliable;
+ // it uses the parser (below) that makes incorrect assumptions about
+    // handles and DOIs (see the comments there).
+ @Deprecated
public GlobalId(String identifier) {
// set the protocol, authority, and identifier via parsePersistentId
@@ -109,6 +113,16 @@ public URL toURL() {
* @param persistentId
*
*/
+    // This parser makes an incorrect assumption: that a handle must consist of 2
+    // "/"-separated parts, and a DOI of 3. ICPSR's DOIs are an example of DOIs
+    // that have only 2 parts: doi:10.3886/ICPSR24006.v2
+ // We already have working global id parsers elsewhere in the app:
+ // for ex., parseStudyIdDOI() and parseStudyIdHandle() in importDDIServiceBean;
+    // We should probably copy that code here, then change the app so that all
+ // the pieces are using this class, instead of replicating the functionality
+ // in multiple places.
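+    // (For comparison: a hypothetical three-part DOI of the form doi:10.5072/FK2/ABC123
+    // would be parsed correctly, while the two-part ICPSR example above would not.)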
+
+ @Deprecated
private boolean parsePersistentId(String persistentId){
if (persistentId==null){
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
new file mode 100644
index 00000000000..f88f1fa583a
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
@@ -0,0 +1,1051 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeleteHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.oai.OaiHandler;
+import edu.harvard.iq.dataverse.search.IndexServiceBean;
+import edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean;
+import edu.harvard.iq.dataverse.util.JsfHelper;
+import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+import javax.ejb.EJB;
+import javax.faces.application.FacesMessage;
+import javax.faces.component.UIComponent;
+import javax.faces.component.UIInput;
+import javax.faces.context.FacesContext;
+import javax.faces.event.ActionEvent;
+import javax.faces.model.SelectItem;
+import javax.faces.view.ViewScoped;
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.servlet.http.HttpServletRequest;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@ViewScoped
+@Named
+public class HarvestingClientsPage implements java.io.Serializable {
+
+ private static final Logger logger = Logger.getLogger(HarvestingClientsPage.class.getCanonicalName());
+
+ @Inject
+ DataverseSession session;
+ @EJB
+ AuthenticationServiceBean authSvc;
+ @EJB
+ DataverseServiceBean dataverseService;
+ @EJB
+ HarvestingClientServiceBean harvestingClientService;
+ @EJB
+ HarvesterServiceBean harvesterService;
+ @EJB
+ DatasetServiceBean datasetService;
+ @EJB
+ IndexServiceBean indexService;
+ @EJB
+ EjbDataverseEngine engineService;
+ @EJB
+ DataverseTimerServiceBean dataverseTimerService;
+ @Inject
+ DataverseRequestServiceBean dvRequestService;
+ @Inject
+ NavigationWrapper navigationWrapper;
+
+ private List configuredHarvestingClients;
+ private Dataverse dataverse;
+ private Long dataverseId = null;
+ private HarvestingClient selectedClient;
+
+ //private static final String solrDocIdentifierDataset = "dataset_";
+
+ public enum PageMode {
+
+ VIEW, CREATE, EDIT, DELETE
+ }
+ private PageMode pageMode = PageMode.VIEW;
+
+ public enum CreateStep {
+ ONE, TWO, THREE, FOUR
+ }
+
+ private CreateStep createStep = CreateStep.ONE;
+
+ private Dataverse selectedDestinationDataverse;
+
+ public void setSelectedDestinationDataverse(Dataverse dv) {
+ this.selectedDestinationDataverse = dv;
+ }
+
+ public Dataverse getSelectedDestinationDataverse() {
+ return this.selectedDestinationDataverse;
+ }
+
+ public List completeSelectedDataverse(String query) {
+ return dataverseService.filterByAliasQuery(query);
+ }
+
+ public String init() {
+ if (!isSessionUserAuthenticated()) {
+ return "/loginpage.xhtml" + navigationWrapper.getRedirectPage();
+ } else if (!isSuperUser()) {
+ return navigationWrapper.notAuthorized();
+ }
+
+ if (dataverseId != null) {
+ setDataverse(dataverseService.find(getDataverseId()));
+ if (getDataverse() == null) {
+ return navigationWrapper.notFound();
+ }
+ } else {
+ setDataverse(dataverseService.findRootDataverse());
+ }
+
+ configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+
+ pageMode = PageMode.VIEW;
+ FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, JH.localize("harvestclients.title"), JH.localize("harvestclients.toptip")));
+ return null;
+ }
+
+ public List getConfiguredHarvestingClients() {
+ return configuredHarvestingClients;
+ }
+
+ public void setConfiguredHarvestingClients(List configuredClients) {
+ configuredHarvestingClients = configuredClients;
+ }
+
+ public Dataverse getDataverse() {
+ return dataverse;
+ }
+
+ public void setDataverse(Dataverse dataverse) {
+ this.dataverse = dataverse;
+ }
+
+ public Long getDataverseId() {
+ return dataverseId;
+ }
+
+ public void setDataverseId(Long dataverseId) {
+ this.dataverseId = dataverseId;
+ }
+
+ public void setSelectedClient(HarvestingClient harvestingClient) {
+ selectedClient = harvestingClient;
+ }
+
+ public void setClientForDelete(HarvestingClient harvestingClient) {
+ selectedClient = harvestingClient;
+ this.pageMode = PageMode.DELETE;
+ }
+
+ public HarvestingClient getSelectedClient() {
+ return selectedClient;
+ }
+
+ public PageMode getPageMode() {
+ return this.pageMode;
+ }
+
+ public void setPageMode(PageMode pageMode) {
+ this.pageMode = pageMode;
+ }
+
+ public boolean isCreateMode() {
+ return PageMode.CREATE == this.pageMode;
+ }
+
+ public boolean isEditMode() {
+ return PageMode.EDIT == this.pageMode;
+ }
+
+ public boolean isViewMode() {
+ return PageMode.VIEW == this.pageMode;
+ }
+
+ public boolean isDeleteMode() {
+ return PageMode.DELETE == this.pageMode;
+ }
+
+ public boolean isCreateStepOne() {
+ return CreateStep.ONE == this.createStep;
+ }
+
+ public boolean isCreateStepTwo() {
+ return CreateStep.TWO == this.createStep;
+ }
+
+ public boolean isCreateStepThree() {
+ return CreateStep.THREE == this.createStep;
+ }
+
+ public boolean isCreateStepFour() {
+ return CreateStep.FOUR == this.createStep;
+ }
+
+
+ public void runHarvest(HarvestingClient harvestingClient) {
+ try {
+ DataverseRequest dataverseRequest = new DataverseRequest(session.getUser(), (HttpServletRequest)null);
+ harvesterService.doAsyncHarvest(dataverseRequest, harvestingClient);
+ } catch (Exception ex) {
+ String failMessage = "Sorry, harvest could not be started for the selected harvesting client configuration (unknown server error).";
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, failMessage);
+ return;
+ }
+
+ String successMessage = JH.localize("harvestclients.actions.runharvest.success");
+ successMessage = successMessage.replace("{0}", harvestingClient.getName());
+ JsfHelper.addSuccessMessage(successMessage);
+
+ // refresh the harvesting clients list - we want this one to be showing
+ // "inprogress"; and we want to be able to disable all the actions buttons
+ // for it:
+        // (we pause briefly here, to give the client a chance to be updated with the
+        // "inprogress" status before we re-query the list)
+        try { Thread.sleep(500L); } catch (Exception e) { /* ignore */ }
+
+
+ configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+
+
+ }
+
+ public void editClient(HarvestingClient harvestingClient) {
+ setSelectedClient(harvestingClient);
+
+ this.newNickname = harvestingClient.getName();
+ this.newHarvestingUrl = harvestingClient.getHarvestingUrl();
+ this.initialSettingsValidated = false;
+
+ // TODO: do we want to try and contact the server, again, to make
+ // sure the metadataformat and/or set are still supported?
+ // and if not, what do we do?
+ // alternatively, should we make these 2 fields not editable at all?
+
+ this.newOaiSet = !StringUtils.isEmpty(harvestingClient.getHarvestingSet()) ? harvestingClient.getHarvestingSet() : "none";
+ this.newMetadataFormat = harvestingClient.getMetadataPrefix();
+ this.newHarvestingStyle = harvestingClient.getHarvestStyle();
+
+ this.harvestTypeRadio = harvestTypeRadioOAI;
+
+ if (harvestingClient.isScheduled()) {
+ if (HarvestingClient.SCHEDULE_PERIOD_DAILY.equals(harvestingClient.getSchedulePeriod())) {
+ this.harvestingScheduleRadio = harvestingScheduleRadioDaily;
+ setHourOfDayAMPMfromInteger(harvestingClient.getScheduleHourOfDay());
+
+            } else if (HarvestingClient.SCHEDULE_PERIOD_WEEKLY.equals(harvestingClient.getSchedulePeriod())) {
+ this.harvestingScheduleRadio = harvestingScheduleRadioWeekly;
+ setHourOfDayAMPMfromInteger(harvestingClient.getScheduleHourOfDay());
+ setWeekdayFromInteger(harvestingClient.getScheduleDayOfWeek());
+
+ } else {
+ // ok, the client is marked as "scheduled" - but the actual
+ // schedule type is not specified.
+ // so we'll show it as unscheduled on the edit form:
+ this.harvestingScheduleRadio = harvestingScheduleRadioNone;
+ this.newHarvestingScheduleDayOfWeek = "Sunday";
+ this.newHarvestingScheduleTimeOfDay = "12";
+ this.harvestingScheduleRadioAMPM = harvestingScheduleRadioAM;
+ }
+ } else {
+ this.harvestingScheduleRadio = harvestingScheduleRadioNone;
+            // unscheduled; but we populate these values to act as the defaults
+ // if they decide to schedule it and toggle the form to show the
+ // time and/or day pulldowns:
+ this.newHarvestingScheduleDayOfWeek = "Sunday";
+ this.newHarvestingScheduleTimeOfDay = "12";
+ this.harvestingScheduleRadioAMPM = harvestingScheduleRadioAM;
+ }
+
+ this.createStep = CreateStep.ONE;
+ this.pageMode = PageMode.EDIT;
+
+ }
+
+
+ public void deleteClient() {
+ if (selectedClient != null) {
+
+ //configuredHarvestingClients.remove(selectedClient);
+
+ logger.info("proceeding to delete harvesting client "+selectedClient.getName());
+ try {
+ harvestingClientService.setDeleteInProgress(selectedClient.getId());
+
+ //engineService.submit(new DeleteHarvestingClientCommand(dvRequestService.getDataverseRequest(), selectedClient));
+ harvestingClientService.deleteClient(selectedClient.getId());
+ JsfHelper.addInfoMessage(JH.localize("harvestclients.tab.header.action.delete.infomessage"));
+
+ //} catch (CommandException ex) {
+ // String failMessage = "Selected harvesting client cannot be deleted.";
+ // JH.addMessage(FacesMessage.SEVERITY_FATAL, failMessage);
+ } catch (Exception ex) {
+ String failMessage = "Selected harvesting client cannot be deleted; unknown exception: "+ex.getMessage();
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, failMessage);
+ }
+ } else {
+ logger.warning("Delete called, with a null selected harvesting client");
+ }
+
+ selectedClient = null;
+ configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+ this.pageMode = PageMode.VIEW;
+
+ }
+
+ public void createClient(ActionEvent ae) {
+
+ HarvestingClient newHarvestingClient = new HarvestingClient(); // will be set as type OAI by default
+
+ newHarvestingClient.setName(newNickname);
+
+ if (getSelectedDestinationDataverse() == null) {
+ JsfHelper.JH.addMessage(FacesMessage.SEVERITY_ERROR,
+ "Failed to create a new Harvesting Client configuration: no destination dataverse selected.");
+            return;
+        }
+
+ newHarvestingClient.setDataverse(getSelectedDestinationDataverse());
+ if (getSelectedDestinationDataverse().getHarvestingClientConfigs() == null) {
+ getSelectedDestinationDataverse().setHarvestingClientConfigs(new ArrayList<>());
+ }
+ getSelectedDestinationDataverse().getHarvestingClientConfigs().add(newHarvestingClient);
+
+ newHarvestingClient.setHarvestingUrl(newHarvestingUrl);
+ if (!StringUtils.isEmpty(newOaiSet)) {
+ newHarvestingClient.setHarvestingSet(newOaiSet);
+ }
+ newHarvestingClient.setMetadataPrefix(newMetadataFormat);
+ newHarvestingClient.setHarvestStyle(newHarvestingStyle);
+
+ if (isNewHarvestingScheduled()) {
+ newHarvestingClient.setScheduled(true);
+
+ if (isNewHarvestingScheduledWeekly()) {
+ newHarvestingClient.setSchedulePeriod(HarvestingClient.SCHEDULE_PERIOD_WEEKLY);
+ if (getWeekDayNumber() == null) {
+ // create a "week day is required..." error message, etc.
+ // but we may be better off not even giving them an opportunity
+ // to leave the field blank - ?
+ }
+ newHarvestingClient.setScheduleDayOfWeek(getWeekDayNumber());
+ } else {
+ newHarvestingClient.setSchedulePeriod(HarvestingClient.SCHEDULE_PERIOD_DAILY);
+ }
+
+ if (getHourOfDay() == null) {
+ // see the comment above, about the day of week. same here.
+ }
+ newHarvestingClient.setScheduleHourOfDay(getHourOfDay());
+ }
+
+ // make default archive url (used to generate links pointing back to the
+ // archival sources, when harvested datasets are displayed in search results),
+ // from the harvesting url:
+ newHarvestingClient.setArchiveUrl(makeDefaultArchiveUrl());
+ // set default description - they can customize it as they see fit:
+ newHarvestingClient.setArchiveDescription(JH.localize("harvestclients.viewEditDialog.archiveDescription.default.generic"));
+
+
+ // will try to save it now:
+
+ try {
+ newHarvestingClient = engineService.submit( new CreateHarvestingClientCommand(dvRequestService.getDataverseRequest(), newHarvestingClient));
+
+ configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+
+ // NO, we no longer create timers here. It is the job of the Mother Timer!
+ //dataverseTimerService.createHarvestTimer(newHarvestingClient);
+
+ String successMessage = JH.localize("harvestclients.newClientDialog.success");
+ successMessage = successMessage.replace("{0}", newHarvestingClient.getName());
+ JsfHelper.addSuccessMessage(successMessage);
+
+ } /* TODO: (?) add a dedicated "NameAlreadyExists" exception for the
+ create client command?
+ catch ( CreateHarvestingClientCommand.NicknameAlreadyExistsException naee ) {
+ FacesContext.getCurrentInstance().addMessage(newHarvestingClient.getName(),
+ new FacesMessage( FacesMessage.SEVERITY_ERROR, naee.getMessage(), null));
+
+ }*/ catch (CommandException ex) {
+ logger.log(Level.WARNING, "Harvesting client creation command failed", ex);
+ JsfHelper.JH.addMessage(FacesMessage.SEVERITY_ERROR,
+ "Harvesting Client creation command failed.",
+ ex.getMessage());
+ } catch (Exception ex) {
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, "Harvesting client creation failed (reason unknown).");
+ logger.log(Level.SEVERE, "Harvesting client creation failed (reason unknown)." + ex.getMessage(), ex);
+ }
+ setPageMode(PageMode.VIEW);
+
+
+ }
+
+ // this saves an existing client that the user has edited:
+
+ public void saveClient(ActionEvent ae) {
+
+ HarvestingClient harvestingClient = getSelectedClient();
+
+ if (harvestingClient == null) {
+ // TODO:
+ // tell the user somehow that the client cannot be saved, and advise
+ // them to save the settings they have entered.
+ // as of now - we will show an error message, but only after the
+ // edit form has been closed.
+ }
+
+ // nickname is not editable for existing clients:
+ //harvestingClient.setName(newNickname);
+ harvestingClient.setHarvestingUrl(newHarvestingUrl);
+ harvestingClient.setHarvestingSet(newOaiSet);
+ harvestingClient.setMetadataPrefix(newMetadataFormat);
+ harvestingClient.setHarvestStyle(newHarvestingStyle);
+
+ if (isNewHarvestingScheduled()) {
+ harvestingClient.setScheduled(true);
+
+ if (isNewHarvestingScheduledWeekly()) {
+ harvestingClient.setSchedulePeriod(HarvestingClient.SCHEDULE_PERIOD_WEEKLY);
+ if (getWeekDayNumber() == null) {
+ // create a "week day is required..." error message, etc.
+ // but we may be better off not even giving them an opportunity
+ // to leave the field blank - ?
+ }
+ harvestingClient.setScheduleDayOfWeek(getWeekDayNumber());
+ } else {
+ harvestingClient.setSchedulePeriod(HarvestingClient.SCHEDULE_PERIOD_DAILY);
+ }
+
+ if (getHourOfDay() == null) {
+ // see the comment above, about the day of week. same here.
+ }
+ harvestingClient.setScheduleHourOfDay(getHourOfDay());
+ }
+
+ // will try to save it now:
+
+ try {
+ harvestingClient = engineService.submit( new UpdateHarvestingClientCommand(dvRequestService.getDataverseRequest(), harvestingClient));
+
+ configuredHarvestingClients = harvestingClientService.getAllHarvestingClients();
+
+ if (!harvestingClient.isScheduled()) {
+ dataverseTimerService.removeHarvestTimer(harvestingClient);
+ }
+            JsfHelper.addSuccessMessage("Successfully updated harvesting client " + harvestingClient.getName());
+
+ } catch (CommandException ex) {
+ logger.log(Level.WARNING, "Failed to save harvesting client", ex);
+ JsfHelper.JH.addMessage(FacesMessage.SEVERITY_ERROR,
+ "Failed to save harvesting client",
+ ex.getMessage());
+ } catch (Exception ex) {
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, "Failed to save harvesting client (reason unknown).");
+ logger.log(Level.SEVERE, "Failed to save harvesting client (reason unknown)." + ex.getMessage(), ex);
+ }
+ setPageMode(PageMode.VIEW);
+
+
+ }
+
+ public void validateMetadataFormat(FacesContext context, UIComponent toValidate, Object rawValue) {
+ String value = (String) rawValue;
+ UIInput input = (UIInput) toValidate;
+ input.setValid(true); // Optimistic approach
+
+ // metadataFormats are selected from a pulldown that's populated with
+ // the values returned by the remote OAI server.
+        // The only validation we want is to make sure they select one from the
+        // menu.
+ if (context.getExternalContext().getRequestParameterMap().get("DO_VALIDATION") != null
+ && StringUtils.isEmpty(value)) {
+
+ input.setValid(false);
+ context.addMessage(toValidate.getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestclients.newClientDialog.oaiMetadataFormat.required")));
+
+ }
+ }
+
+ public boolean validateNickname() {
+
+ if ( !StringUtils.isEmpty(getNewNickname()) ) {
+
+ if (getNewNickname().length() > 30 || (!Pattern.matches("^[a-zA-Z0-9\\_\\-]+$", getNewNickname())) ) {
+ //input.setValid(false);
+ FacesContext.getCurrentInstance().addMessage(getNewClientNicknameInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestclients.newClientDialog.nickname.invalid")));
+ return false;
+
+            // If it passes the regex test, check that the nickname is not already in use:
+ } else if ( harvestingClientService.findByNickname(getNewNickname()) != null ) {
+ //input.setValid(false);
+ FacesContext.getCurrentInstance().addMessage(getNewClientNicknameInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestclients.newClientDialog.nickname.alreadyused")));
+ return false;
+ }
+ return true;
+ }
+
+ // Nickname field is empty:
+ FacesContext.getCurrentInstance().addMessage(getNewClientNicknameInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestclients.newClientDialog.nickname.required")));
+ return false;
+ }
+
+ public boolean validateSelectedDataverse() {
+ if (selectedDestinationDataverse == null) {
+ FacesContext.getCurrentInstance().addMessage(getSelectedDataverseMenu().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestclients.newClientDialog.dataverse.required")));
+ return false;
+ }
+ return true;
+ }
+
+ public boolean validateServerUrlOAI() {
+ if (!StringUtils.isEmpty(getNewHarvestingUrl())) {
+
+ OaiHandler oaiHandler = new OaiHandler(getNewHarvestingUrl());
+ boolean success = true;
+ String message = null;
+
+ // First, we'll try to obtain the list of supported metadata formats:
+ try {
+ List formats = oaiHandler.runListMetadataFormats();
+ if (formats != null && formats.size() > 0) {
+ createOaiMetadataFormatSelectItems(formats);
+ } else {
+ success = false;
+ message = "received empty list from ListMetadataFormats";
+ }
+
+            // TODO: differentiate between failure scenarios; eventually catch a more
+            // specific OaiHandlerException here instead of the generic Exception below.
+ } catch (Exception ex) {
+ success = false;
+ message = "Failed to execute listmetadataformats; " + ex.getMessage();
+
+ }
+
+ if (success) {
+ logger.info("metadataformats: success");
+ logger.info(getOaiMetadataFormatSelectItems().size() + " metadata formats total.");
+ } else {
+ logger.info("metadataformats: failed;"+message);
+ }
+ // And if that worked, the list of sets provided:
+
+ if (success) {
+ try {
+ List sets = oaiHandler.runListSets();
+ createOaiSetsSelectItems(sets);
+ } catch (Exception ex) {
+ //success = false;
+ // ok - we'll try and live without sets for now...
+ // (since listMetadataFormats has succeeded earlier, may
+ // be safe to assume that this OAI server is at least
+ // somewhat functioning...)
+ // (XOAI ListSets buggy as well?)
+ message = "Failed to execute ListSets; " + ex.getMessage();
+ logger.warning(message);
+ }
+ }
+
+ if (success) {
+ return true;
+ }
+
+ FacesContext.getCurrentInstance().addMessage(getNewClientUrlInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", getNewHarvestingUrl() + ": " + JH.localize("harvestclients.newClientDialog.url.invalid")));
+ return false;
+
+ }
+ FacesContext.getCurrentInstance().addMessage(getNewClientUrlInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", getNewHarvestingUrl() + ": " + JH.localize("harvestclients.newClientDialog.url.required")));
+ return false;
+ }
+
+ public void validateInitialSettings() {
+ if (isHarvestTypeOAI()) {
+ boolean nicknameValidated = true;
+ boolean destinationDataverseValidated = true;
+ if (isCreateMode()) {
+ nicknameValidated = validateNickname();
+ destinationDataverseValidated = validateSelectedDataverse();
+ }
+ boolean urlValidated = validateServerUrlOAI();
+
+ if (nicknameValidated && destinationDataverseValidated && urlValidated) {
+ // In Create mode we want to run all 3 validation tests; this is why
+ // we are not doing "if ((validateNickname() && validateServerUrlOAI())"
+ // in the line above. -- L.A. 4.4 May 2016.
+
+ setInitialSettingsValidated(true);
+ this.createStep = CreateStep.TWO;
+ }
+ // (and if not - it stays set to false)
+ }
+ }
+
+ public void backToStepOne() {
+ this.initialSettingsValidated = false;
+ this.createStep = CreateStep.ONE;
+ }
+
+ public void goToStepThree() {
+ this.createStep = CreateStep.THREE;
+ }
+
+ public void backToStepTwo() {
+ this.createStep = CreateStep.TWO;
+ }
+
+ public void goToStepFour() {
+ this.createStep = CreateStep.FOUR;
+ }
+
+ public void backToStepThree() {
+ this.createStep = CreateStep.THREE;
+ }
+
+ /*
+ * Variables and methods for creating a new harvesting client:
+ */
+
+ private int harvestTypeRadio; // 1 = OAI; 2 = Nesstar
+ private static int harvestTypeRadioOAI = 1;
+ private static int harvestTypeRadioNesstar = 2;
+
+ UIInput newClientNicknameInputField;
+ UIInput newClientUrlInputField;
+ UIInput hiddenInputField;
+ /*UISelectOne*/ UIInput metadataFormatMenu;
+ UIInput selectedDataverseMenu;
+
+ private String newNickname = "";
+ private String newHarvestingUrl = "";
+ private boolean initialSettingsValidated = false;
+ private String newOaiSet = "";
+ private String newMetadataFormat = "";
+ private String newHarvestingStyle = "";
+
+ private int harvestingScheduleRadio;
+
+ private static final int harvestingScheduleRadioNone = 0;
+ private static final int harvestingScheduleRadioDaily = 1;
+ private static final int harvestingScheduleRadioWeekly = 2;
+
+ private String newHarvestingScheduleDayOfWeek = "Sunday";
+ private String newHarvestingScheduleTimeOfDay = "12";
+
+ private int harvestingScheduleRadioAMPM;
+ private static final int harvestingScheduleRadioAM = 0;
+ private static final int harvestingScheduleRadioPM = 1;
+
+
+ public void initNewClient(ActionEvent ae) {
+ //this.selectedClient = new HarvestingClient();
+ this.newNickname = "";
+ this.newHarvestingUrl = "";
+ this.initialSettingsValidated = false;
+ this.newOaiSet = "";
+ this.newMetadataFormat = "";
+ this.newHarvestingStyle = HarvestingClient.HARVEST_STYLE_DATAVERSE;
+
+ this.harvestTypeRadio = harvestTypeRadioOAI;
+ this.harvestingScheduleRadio = harvestingScheduleRadioNone;
+
+ this.newHarvestingScheduleDayOfWeek = "Sunday";
+ this.newHarvestingScheduleTimeOfDay = "12";
+
+ this.harvestingScheduleRadioAMPM = harvestingScheduleRadioAM;
+
+ this.pageMode = PageMode.CREATE;
+ this.createStep = CreateStep.ONE;
+ this.selectedDestinationDataverse = null;
+
+ }
+
+ public boolean isInitialSettingsValidated() {
+ return this.initialSettingsValidated;
+ }
+
+ public void setInitialSettingsValidated(boolean validated) {
+ this.initialSettingsValidated = validated;
+ }
+
+
+ public String getNewNickname() {
+ return newNickname;
+ }
+
+ public void setNewNickname(String newNickname) {
+ this.newNickname = newNickname;
+ }
+
+ public String getNewHarvestingUrl() {
+ return newHarvestingUrl;
+ }
+
+ public void setNewHarvestingUrl(String newHarvestingUrl) {
+ this.newHarvestingUrl = newHarvestingUrl;
+ }
+
+ public int getHarvestTypeRadio() {
+ return this.harvestTypeRadio;
+ }
+
+ public void setHarvestTypeRadio(int harvestTypeRadio) {
+ this.harvestTypeRadio = harvestTypeRadio;
+ }
+
+ public boolean isHarvestTypeOAI() {
+ return harvestTypeRadioOAI == harvestTypeRadio;
+ }
+
+ public boolean isHarvestTypeNesstar() {
+ return harvestTypeRadioNesstar == harvestTypeRadio;
+ }
+
+ public String getNewOaiSet() {
+ return newOaiSet;
+ }
+
+ public void setNewOaiSet(String newOaiSet) {
+ this.newOaiSet = newOaiSet;
+ }
+
+ public String getNewMetadataFormat() {
+ return newMetadataFormat;
+ }
+
+ public void setNewMetadataFormat(String newMetadataFormat) {
+ this.newMetadataFormat = newMetadataFormat;
+ }
+
+ public String getNewHarvestingStyle() {
+ return newHarvestingStyle;
+ }
+
+ public void setNewHarvestingStyle(String newHarvestingStyle) {
+ this.newHarvestingStyle = newHarvestingStyle;
+ }
+
+ public int getHarvestingScheduleRadio() {
+ return this.harvestingScheduleRadio;
+ }
+
+ public void setHarvestingScheduleRadio(int harvestingScheduleRadio) {
+ this.harvestingScheduleRadio = harvestingScheduleRadio;
+ }
+
+ public boolean isNewHarvestingScheduled() {
+ return this.harvestingScheduleRadio != harvestingScheduleRadioNone;
+ }
+
+ public boolean isNewHarvestingScheduledWeekly() {
+ return this.harvestingScheduleRadio == harvestingScheduleRadioWeekly;
+ }
+
+ public boolean isNewHarvestingScheduledDaily() {
+ return this.harvestingScheduleRadio == harvestingScheduleRadioDaily;
+ }
+
+ public String getNewHarvestingScheduleDayOfWeek() {
+ return newHarvestingScheduleDayOfWeek;
+ }
+
+ public void setNewHarvestingScheduleDayOfWeek(String newHarvestingScheduleDayOfWeek) {
+ this.newHarvestingScheduleDayOfWeek = newHarvestingScheduleDayOfWeek;
+ }
+
+ public String getNewHarvestingScheduleTimeOfDay() {
+ return newHarvestingScheduleTimeOfDay;
+ }
+
+ public void setNewHarvestingScheduleTimeOfDay(String newHarvestingScheduleTimeOfDay) {
+ this.newHarvestingScheduleTimeOfDay = newHarvestingScheduleTimeOfDay;
+ }
+
+ public int getHarvestingScheduleRadioAMPM() {
+ return this.harvestingScheduleRadioAMPM;
+ }
+
+ public void setHarvestingScheduleRadioAMPM(int harvestingScheduleRadioAMPM) {
+ this.harvestingScheduleRadioAMPM = harvestingScheduleRadioAMPM;
+ }
+
+ public boolean isHarvestingScheduleTimeOfDayPM() {
+ return getHarvestingScheduleRadioAMPM() == harvestingScheduleRadioPM;
+ }
+
+ public void toggleNewClientSchedule() {
+
+ }
+
+
+ public UIInput getNewClientNicknameInputField() {
+ return newClientNicknameInputField;
+ }
+
+ public void setNewClientNicknameInputField(UIInput newClientInputField) {
+ this.newClientNicknameInputField = newClientInputField;
+ }
+
+ public UIInput getNewClientUrlInputField() {
+ return newClientUrlInputField;
+ }
+
+ public void setNewClientUrlInputField(UIInput newClientInputField) {
+ this.newClientUrlInputField = newClientInputField;
+ }
+
+ public UIInput getHiddenInputField() {
+ return hiddenInputField;
+ }
+
+ public void setHiddenInputField(UIInput hiddenInputField) {
+ this.hiddenInputField = hiddenInputField;
+ }
+
+ public UIInput getMetadataFormatMenu() {
+ return metadataFormatMenu;
+ }
+
+ public void setMetadataFormatMenu(UIInput metadataFormatMenu) {
+ this.metadataFormatMenu = metadataFormatMenu;
+ }
+
+ public UIInput getSelectedDataverseMenu() {
+ return selectedDataverseMenu;
+ }
+
+ public void setSelectedDataverseMenu(UIInput selectedDataverseMenu) {
+ this.selectedDataverseMenu = selectedDataverseMenu;
+ }
+
+ private List<SelectItem> oaiSetsSelectItems;
+
+ public List<SelectItem> getOaiSetsSelectItems() {
+ return oaiSetsSelectItems;
+ }
+
+ public void setOaiSetsSelectItems(List<SelectItem> oaiSetsSelectItems) {
+ this.oaiSetsSelectItems = oaiSetsSelectItems;
+ }
+
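+ // Builds the SelectItem list for the "OAI Set" menu from the set names
+ // reported by the remote server, skipping any blank entries.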
+ private void createOaiSetsSelectItems(List<String> setNames) {
+ setOaiSetsSelectItems(new ArrayList<>());
+ if (setNames != null) {
+ for (String set: setNames) {
+ if (!StringUtils.isEmpty(set)) {
+ getOaiSetsSelectItems().add(new SelectItem(set, set));
+ }
+ }
+ }
+ }
+
+ private List<SelectItem> oaiMetadataFormatSelectItems;
+
+ public List<SelectItem> getOaiMetadataFormatSelectItems() {
+ return oaiMetadataFormatSelectItems;
+ }
+
+ public void setOaiMetadataFormatSelectItems(List<SelectItem> oaiMetadataFormatSelectItems) {
+ this.oaiMetadataFormatSelectItems = oaiMetadataFormatSelectItems;
+ }
+
+ private void createOaiMetadataFormatSelectItems(List<String> formats) {
+ setOaiMetadataFormatSelectItems(new ArrayList<>());
+ if (formats != null) {
+ for (String f: formats) {
+ if (!StringUtils.isEmpty(f)) {
+ getOaiMetadataFormatSelectItems().add(new SelectItem(f, f));
+ }
+ }
+ }
+ }
+
+
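+ // Select items for the "harvesting style" menu, built lazily from
+ // HarvestingClient.HARVEST_STYLE_LIST, with display labels looked up
+ // in HARVEST_STYLE_INFOMAP.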
+ private List<SelectItem> harvestingStylesSelectItems = null;
+
+ public List<SelectItem> getHarvestingStylesSelectItems() {
+ if (this.harvestingStylesSelectItems == null) {
+ this.harvestingStylesSelectItems = new ArrayList<>();
+ for (int i = 0; i < HarvestingClient.HARVEST_STYLE_LIST.size(); i++) {
+ String style = HarvestingClient.HARVEST_STYLE_LIST.get(i);
+ this.harvestingStylesSelectItems.add(new SelectItem(
+ style,
+ HarvestingClient.HARVEST_STYLE_INFOMAP.get(style)));
+ }
+ }
+ return this.harvestingStylesSelectItems;
+ }
+
+ public void setHarvestingStylesSelectItems(List<SelectItem> harvestingStylesSelectItems) {
+ this.harvestingStylesSelectItems = harvestingStylesSelectItems;
+ }
+
+ private List<String> weekDays = null;
+ private List<SelectItem> daysOfWeekSelectItems = null;
+
+ public List<SelectItem> getDaysOfWeekSelectItems() {
+ if (this.daysOfWeekSelectItems == null) {
+ List<String> weekDays = getWeekDays();
+ this.daysOfWeekSelectItems = new ArrayList<>();
+ for (int i = 0; i < weekDays.size(); i++) {
+ this.daysOfWeekSelectItems.add(new SelectItem(weekDays.get(i), weekDays.get(i)));
+ }
+ }
+
+ return this.daysOfWeekSelectItems;
+ }
+
+ private List<String> getWeekDays() {
+ if (weekDays == null) {
+ weekDays = Arrays.asList("Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday");
+ }
+ return weekDays;
+ }
+
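+ // Maps a day name to its 1-based position in the week ("Sunday" = 1);
+ // returns null if the name is not recognized.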
+ private Integer getWeekDayNumber (String weekDayName) {
+ List<String> weekDays = getWeekDays();
+ int i = 1;
+ for (String weekDayString: weekDays) {
+ if (weekDayString.equals(weekDayName)) {
+ return new Integer(i);
+ }
+ i++;
+ }
+ return null;
+ }
+
+ private Integer getWeekDayNumber() {
+ return getWeekDayNumber(getNewHarvestingScheduleDayOfWeek());
+ }
+
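+ // Reverse of getWeekDayNumber(): sets the day-of-week menu value from a
+ // stored 1-based day number, defaulting to Sunday if the value is out of range.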
+ private void setWeekdayFromInteger(Integer weekday) {
+ if (weekday == null || weekday.intValue() < 1 || weekday.intValue() > 7) {
+ weekday = 1;
+ }
+ // weekday is 1-based (Sunday = 1), but the list is 0-based:
+ this.newHarvestingScheduleDayOfWeek = getWeekDays().get(weekday - 1);
+ }
+
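+ // Converts the 12-hour value selected on the page into a 24-hour value:
+ // "12" AM becomes 0, and PM hours get 12 added.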
+ private Integer getHourOfDay() {
+ Integer hour = null;
+ if (getNewHarvestingScheduleTimeOfDay() != null) {
+ try {
+ hour = new Integer(getNewHarvestingScheduleTimeOfDay());
+ } catch (Exception ex) {
+ hour = null;
+ }
+ }
+
+ if (hour != null) {
+ if (hour.intValue() == 12) {
+ hour = 0;
+ }
+ if (isHarvestingScheduleTimeOfDayPM()) {
+ hour = hour + 12;
+ }
+ }
+
+ return hour;
+ }
+
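+ // Reverse of getHourOfDay(): splits a stored 24-hour value back into the
+ // 12-hour menu value and the AM/PM radio selection.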
+ private void setHourOfDayAMPMfromInteger(Integer hour24) {
+ if (hour24 == null || hour24.intValue() > 23) {
+ hour24 = 0;
+ }
+
+ if (hour24.intValue() > 11) {
+ hour24 = hour24.intValue() - 12;
+ this.harvestingScheduleRadioAMPM = harvestingScheduleRadioPM;
+ } else {
+ this.harvestingScheduleRadioAMPM = harvestingScheduleRadioAM;
+ }
+
+ if (hour24.intValue() == 0) {
+ this.newHarvestingScheduleTimeOfDay = "12";
+ } else {
+ this.newHarvestingScheduleTimeOfDay = hour24.toString();
+ }
+ }
+
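+ // Derives a default archive URL from the OAI harvesting URL by keeping
+ // everything up to the first path separator past the protocol
+ // (e.g. "http://host/oai" becomes "http://host").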
+ private String makeDefaultArchiveUrl() {
+ String archiveUrl = null;
+
+ if (getNewHarvestingUrl() != null) {
+ int k = getNewHarvestingUrl().indexOf('/', 8);
+ if (k > -1) {
+ archiveUrl = getNewHarvestingUrl().substring(0, k);
+ }
+ }
+
+ return archiveUrl;
+ }
+
+ public void setDaysOfWeekSelectItems(List<SelectItem> daysOfWeekSelectItems) {
+ this.daysOfWeekSelectItems = daysOfWeekSelectItems;
+ }
+
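+ // Select items for the "hour of day" menu, displayed in 12-hour format:
+ // "12:00" first (stored as "12"), followed by 1:00 through 11:00.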
+ private List<SelectItem> hoursOfDaySelectItems = null;
+
+ public List<SelectItem> getHoursOfDaySelectItems() {
+ if (this.hoursOfDaySelectItems == null) {
+ this.hoursOfDaySelectItems = new ArrayList<>();
+ this.hoursOfDaySelectItems.add(new SelectItem( 12+"", "12:00"));
+ for (int i = 1; i < 12; i++) {
+ this.hoursOfDaySelectItems.add(new SelectItem(i+"", i+":00"));
+ }
+ }
+
+ return this.hoursOfDaySelectItems;
+ }
+
+ public void setHoursOfDaySelectItems(List<SelectItem> hoursOfDaySelectItems) {
+ this.hoursOfDaySelectItems = hoursOfDaySelectItems;
+ }
+
+ public boolean isSessionUserAuthenticated() {
+
+ if (session == null) {
+ return false;
+ }
+
+ if (session.getUser() == null) {
+ return false;
+ }
+
+ if (session.getUser().isAuthenticated()) {
+ return true;
+ }
+
+ return false;
+ }
+
+ public boolean isSuperUser() {
+ return session.getUser().isSuperuser();
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
new file mode 100644
index 00000000000..cdb157f0ad4
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
@@ -0,0 +1,534 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
+import edu.harvard.iq.dataverse.harvest.server.OAIRecord;
+import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
+import edu.harvard.iq.dataverse.harvest.server.OAISet;
+import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
+import edu.harvard.iq.dataverse.harvest.server.OaiSetException;
+import edu.harvard.iq.dataverse.util.JsfHelper;
+import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+import javax.ejb.EJB;
+import javax.faces.application.FacesMessage;
+import javax.faces.component.UIComponent;
+import javax.faces.component.UIInput;
+import javax.faces.context.FacesContext;
+import javax.faces.event.ActionEvent;
+import javax.faces.view.ViewScoped;
+import javax.inject.Inject;
+import javax.inject.Named;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@ViewScoped
+@Named
+public class HarvestingSetsPage implements java.io.Serializable {
+
+ private static final Logger logger = Logger.getLogger(HarvestingSetsPage.class.getCanonicalName());
+
+ @Inject
+ DataverseSession session;
+ @EJB
+ AuthenticationServiceBean authSvc;
+ @EJB
+ DataverseServiceBean dataverseService;
+ @EJB
+ OAISetServiceBean oaiSetService;
+ @EJB
+ OAIRecordServiceBean oaiRecordService;
+
+ @EJB
+ EjbDataverseEngine engineService;
+ @EJB
+ SystemConfig systemConfig;
+
+ @Inject
+ DataverseRequestServiceBean dvRequestService;
+ @Inject
+ NavigationWrapper navigationWrapper;
+
+ private List<OAISet> configuredHarvestingSets;
+ private OAISet selectedSet;
+ private boolean setSpecValidated = false;
+ private boolean setQueryValidated = false;
+ private int setQueryResult = -1;
+
+ public enum PageMode {
+
+ VIEW, CREATE, EDIT
+ }
+ private PageMode pageMode = PageMode.VIEW;
+
+ private int oaiServerStatusRadio;
+
+ private static final int oaiServerStatusRadioDisabled = 0;
+ private static final int oaiServerStatusRadioEnabled = 1;
+ private UIInput newSetSpecInputField;
+ private UIInput newSetQueryInputField;
+
+ private String newSetSpec = "";
+ private String newSetDescription = "";
+ private String newSetQuery = "";
+
+ public String getNewSetSpec() {
+ return newSetSpec;
+ }
+
+ public void setNewSetSpec(String newSetSpec) {
+ this.newSetSpec = newSetSpec;
+ }
+
+ public String getNewSetDescription() {
+ return newSetDescription;
+ }
+
+ public void setNewSetDescription(String newSetDescription) {
+ this.newSetDescription = newSetDescription;
+ }
+
+ public String getNewSetQuery() {
+ return newSetQuery;
+ }
+
+ public void setNewSetQuery(String newSetQuery) {
+ this.newSetQuery = newSetQuery;
+ }
+
+ public int getOaiServerStatusRadio() {
+ return this.oaiServerStatusRadio;
+ }
+
+ public void setOaiServerStatusRadio(int oaiServerStatusRadio) {
+ this.oaiServerStatusRadio = oaiServerStatusRadio;
+ }
+
+ public String init() {
+ if (!isSessionUserAuthenticated()) {
+ return "/loginpage.xhtml" + navigationWrapper.getRedirectPage();
+ } else if (!isSuperUser()) {
+ return navigationWrapper.notAuthorized();
+ }
+
+
+ configuredHarvestingSets = oaiSetService.findAll();
+ pageMode = PageMode.VIEW;
+
+ if (isHarvestingServerEnabled()) {
+ oaiServerStatusRadio = oaiServerStatusRadioEnabled;
+ } else {
+ oaiServerStatusRadio = oaiServerStatusRadioDisabled;
+ }
+
+ FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_INFO, JH.localize("harvestserver.title"), JH.localize("harvestserver.toptip")));
+ return null;
+ }
+
+ public List<OAISet> getConfiguredOAISets() {
+ return configuredHarvestingSets;
+ }
+
+ public void setConfiguredOAISets(List<OAISet> oaiSets) {
+ configuredHarvestingSets = oaiSets;
+ }
+
+ public boolean isHarvestingServerEnabled() {
+ return systemConfig.isOAIServerEnabled();
+ }
+
+ public void toggleHarvestingServerStatus() {
+ if (isHarvestingServerEnabled()) {
+ systemConfig.disableOAIServer();
+ } else {
+ systemConfig.enableOAIServer();
+ JsfHelper.addSuccessMessage(JH.localize("harvestserver.service.enable.success"));
+ }
+ }
+
+ public UIInput getNewSetSpecInputField() {
+ return newSetSpecInputField;
+ }
+
+ public void setNewSetSpecInputField(UIInput newSetSpecInputField) {
+ this.newSetSpecInputField = newSetSpecInputField;
+ }
+
+ public UIInput getNewSetQueryInputField() {
+ return newSetQueryInputField;
+ }
+
+ public void setNewSetQueryInputField(UIInput newSetQueryInputField) {
+ this.newSetQueryInputField = newSetQueryInputField;
+ }
+
+ public void disableHarvestingServer() {
+ systemConfig.disableOAIServer();
+ }
+
+ public void setSelectedSet(OAISet oaiSet) {
+ selectedSet = oaiSet;
+ }
+
+ public OAISet getSelectedSet() {
+ return selectedSet;
+ }
+
+ // init method when the user clicks 'add new set':
+ public void initNewSet(ActionEvent ae) {
+
+ this.newSetSpec = "";
+ this.newSetDescription = "";
+ this.newSetQuery = "";
+
+ this.pageMode = PageMode.CREATE;
+ this.setSpecValidated = false;
+ this.setQueryValidated = false;
+ this.setQueryResult = -1;
+
+ }
+
+ // init method when the user clicks 'edit existing set':
+ public void editSet(OAISet oaiSet) {
+ this.newSetSpec = oaiSet.getSpec();
+ this.newSetDescription = oaiSet.getDescription();
+ this.newSetQuery = oaiSet.getDefinition();
+
+ this.pageMode = PageMode.EDIT;
+ this.setSpecValidated = false;
+ this.setQueryValidated = false;
+ this.setQueryResult = -1;
+
+ setSelectedSet(oaiSet);
+ }
+
+ public void createSet(ActionEvent ae) {
+
+ OAISet newOaiSet = new OAISet();
+
+
+ newOaiSet.setSpec(getNewSetSpec());
+ newOaiSet.setName(getNewSetSpec());
+ newOaiSet.setDescription(getNewSetDescription());
+ newOaiSet.setDefinition(getNewSetQuery());
+
+ boolean success = false;
+
+ try {
+ oaiSetService.save(newOaiSet);
+ configuredHarvestingSets = oaiSetService.findAll();
+ String successMessage = JH.localize("harvestserver.newSetDialog.success");
+ successMessage = successMessage.replace("{0}", newOaiSet.getSpec());
+ JsfHelper.addSuccessMessage(successMessage);
+ success = true;
+
+ } catch (Exception ex) {
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, "Failed to create OAI set.");
+ logger.log(Level.SEVERE, "Failed to create OAI set: " + ex.getMessage(), ex);
+ }
+
+ if (success) {
+ OAISet savedSet = oaiSetService.findBySpec(getNewSetSpec());
+ if (savedSet != null) {
+ runSetExport(savedSet);
+ configuredHarvestingSets = oaiSetService.findAll();
+ }
+ }
+
+ setPageMode(HarvestingSetsPage.PageMode.VIEW);
+ }
+
+ // this saves an existing set that the user has edited:
+
+ public void saveSet(ActionEvent ae) {
+
+ OAISet oaiSet = getSelectedSet();
+
+ if (oaiSet == null) {
+ // The selected set is no longer available (it may have been deleted in
+ // another session); tell the user and bail out, rather than running into
+ // a NullPointerException below.
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, "Failed to update OAI set: no set selected. Please note the settings you have entered and try again.");
+ return;
+ }
+
+ // Note that the set spec (which doubles as the set's name) is not editable:
+ oaiSet.setDefinition(getNewSetQuery());
+ oaiSet.setDescription(getNewSetDescription());
+
+ // will try to save it now:
+ boolean success = false;
+
+ try {
+ oaiSetService.save(oaiSet);
+ configuredHarvestingSets = oaiSetService.findAll();
+
+ JsfHelper.addSuccessMessage("Successfully updated OAI set \"" + oaiSet.getSpec() + "\".");
+ success = true;
+
+ } catch (Exception ex) {
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, "Failed to update OAI set.");
+ logger.log(Level.SEVERE, "Failed to update OAI set: " + ex.getMessage(), ex);
+ }
+
+ if (success) {
+ OAISet createdSet = oaiSetService.findBySpec(getNewSetSpec());
+ if (createdSet != null) {
+ runSetExport(createdSet);
+ configuredHarvestingSets = oaiSetService.findAll();
+ }
+ }
+
+ setPageMode(HarvestingSetsPage.PageMode.VIEW);
+
+
+ }
+
+ public void deleteSet() {
+ if (selectedSet != null) {
+ logger.info("proceeding to delete harvesting set "+ selectedSet.getSpec());
+ try {
+ oaiSetService.setDeleteInProgress(selectedSet.getId());
+ oaiSetService.remove(selectedSet.getId());
+ selectedSet = null;
+
+ configuredHarvestingSets = oaiSetService.findAll();
+ JsfHelper.addInfoMessage(JH.localize("harvestserver.tab.header.action.delete.infomessage"));
+ } catch (Exception ex) {
+ String failMessage = "Failed to delete harvesting set; unknown exception: "+ex.getMessage();
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, failMessage);
+ }
+ } else {
+ logger.warning("Delete called, with a null selected harvesting set!");
+ }
+
+ }
+
+ public boolean isSetSpecValidated() {
+ return this.setSpecValidated;
+ }
+
+ public void setSetSpecValidated(boolean validated) {
+ this.setSpecValidated = validated;
+ }
+
+ public boolean isSetQueryValidated() {
+ return this.setQueryValidated;
+ }
+
+ public void setSetQueryValidated(boolean validated) {
+ this.setQueryValidated = validated;
+ }
+
+ public int getSetQueryResult() {
+ return this.setQueryResult;
+ }
+
+ public void setSetQueryResult(int setQueryResult) {
+ this.setQueryResult = setQueryResult;
+ }
+
+ public PageMode getPageMode() {
+ return this.pageMode;
+ }
+
+ public void setPageMode(PageMode pageMode) {
+ this.pageMode = pageMode;
+ }
+
+ public boolean isCreateMode() {
+ return PageMode.CREATE == this.pageMode;
+ }
+
+ public boolean isEditMode() {
+ return PageMode.EDIT == this.pageMode;
+ }
+
+ public boolean isViewMode() {
+ return PageMode.VIEW == this.pageMode;
+ }
+
+ public boolean isSessionUserAuthenticated() {
+
+ if (session == null) {
+ return false;
+ }
+
+ if (session.getUser() == null) {
+ return false;
+ }
+
+ if (session.getUser().isAuthenticated()) {
+ return true;
+ }
+
+ return false;
+ }
+
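+ // Runs the set's definition query against the search index and returns the
+ // number of matching datasets; returns 0 if the query cannot be validated.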
+ public int getSetInfoNumOfDatasets(OAISet oaiSet) {
+ String query = oaiSet.getDefinition();
+
+ try {
+ int num = oaiSetService.validateDefinitionQuery(query);
+ if (num > -1) {
+ return num;
+ }
+ } catch (OaiSetException ose) {
+ // do nothing - will return zero.
+ }
+ return 0;
+ }
+
+ public int getSetInfoNumOfExported(OAISet oaiSet) {
+ List<OAIRecord> records = oaiRecordService.findActiveOaiRecordsBySetName(oaiSet.getSpec());
+
+ if (records == null || records.isEmpty()) {
+ return 0;
+ }
+
+ return records.size();
+
+ }
+
+ public int getSetInfoNumOfDeleted(OAISet oaiSet) {
+ List<OAIRecord> records = oaiRecordService.findDeletedOaiRecordsBySetName(oaiSet.getSpec());
+
+ if (records == null || records.isEmpty()) {
+ return 0;
+ }
+
+ return records.size();
+
+ }
+
+ public void validateSetQuery() {
+ int datasetsFound = 0;
+ try {
+ datasetsFound = oaiSetService.validateDefinitionQuery(getNewSetQuery());
+ } catch (OaiSetException ose) {
+ FacesContext.getCurrentInstance().addMessage(getNewSetQueryInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", "Search failed for the query provided. Message from the Dataverse search server: "+ose.getMessage()));
+ setSetQueryValidated(false);
+ return;
+ }
+
+ setSetQueryValidated(true);
+ setSetQueryResult(datasetsFound);
+
+ }
+
+ public void backToQuery() {
+ setSetQueryValidated(false);
+ }
+
+ /*
+
+ version of validateSetSpec() that's not component-driven (must be called explicitly
+ with action="#{harvestingSetsPage.validateSetSpec}")
+
+
+ public void validateSetSpec() {
+
+ if ( !StringUtils.isEmpty(getNewSetSpec()) ) {
+
+ if (! Pattern.matches("^[a-zA-Z0-9\\_\\-]+$", getNewSetSpec()) ) {
+ //input.setValid(false);
+ FacesContext.getCurrentInstance().addMessage(getNewSetSpecInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestserver.newSetDialog.setspec.invalid")));
+ setSetSpecValidated(false);
+ return;
+
+ // If it passes the regex test, check
+ } else if ( oaiSetService.findBySpec(getNewSetSpec()) != null ) {
+ //input.setValid(false);
+ FacesContext.getCurrentInstance().addMessage(getNewSetSpecInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestserver.newSetDialog.setspec.alreadyused")));
+ setSetSpecValidated(false);
+ return;
+ }
+ setSetSpecValidated(true);
+ return;
+ }
+
+ // Nickname field is empty:
+ FacesContext.getCurrentInstance().addMessage(getNewSetSpecInputField().getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestserver.newSetDialog.setspec.required")));
+ setSetSpecValidated(false);
+ return;
+ }*/
+
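+ // JSF validator for the set spec input field. It only runs when the request
+ // carries the DO_VALIDATION parameter, so the dialog's other buttons can be
+ // used without triggering validation of a half-filled form.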
+ public void validateSetSpec(FacesContext context, UIComponent toValidate, Object rawValue) {
+ String value = (String) rawValue;
+ UIInput input = (UIInput) toValidate;
+ input.setValid(true); // Optimistic approach
+
+ if (context.getExternalContext().getRequestParameterMap().get("DO_VALIDATION") != null) {
+
+ if (!StringUtils.isEmpty(value)) {
+ if (!Pattern.matches("^[a-zA-Z0-9\\_\\-]+$", value)) {
+ input.setValid(false);
+ context.addMessage(toValidate.getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestserver.newSetDialog.setspec.invalid")));
+ return;
+
+ // If it passes the regex test, check
+ } else if (oaiSetService.findBySpec(value) != null) {
+ input.setValid(false);
+ context.addMessage(toValidate.getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestserver.newSetDialog.setspec.alreadyused")));
+ return;
+ }
+
+ // set spec looks legit!
+ return;
+ }
+
+ // the field can't be left empty either:
+ input.setValid(false);
+ context.addMessage(toValidate.getClientId(),
+ new FacesMessage(FacesMessage.SEVERITY_ERROR, "", JH.localize("harvestserver.newSetDialog.setspec.required")));
+
+ }
+
+ // no validation requested - so we're cool!
+ }
+
+ // this will re-export the set in the background, asynchronously:
+ public void startSetExport(OAISet oaiSet) {
+ try {
+ runSetExport(oaiSet);
+ } catch (Exception ex) {
+ String failMessage = "Sorry, could not start re-export on selected OAI set (unknown server error).";
+ JH.addMessage(FacesMessage.SEVERITY_FATAL, failMessage);
+ return;
+ }
+
+ String successMessage = JH.localize("harvestserver.actions.runreexport.success");
+ successMessage = successMessage.replace("{0}", oaiSet.getSpec());
+ JsfHelper.addSuccessMessage(successMessage);
+ configuredHarvestingSets = oaiSetService.findAll();
+ }
+
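+ // Marks the set as "update in progress" and kicks off the asynchronous
+ // export of its records.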
+ public void runSetExport(OAISet oaiSet) {
+ oaiSetService.setUpdateInProgress(oaiSet.getId());
+ oaiSetService.exportOaiSetAsync(oaiSet);
+ }
+
+ public boolean isSuperUser() {
+ return session.getUser().isSuperuser();
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
index 201eb29a17e..f37b568d333 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
@@ -401,7 +401,8 @@ private void rejectAccessToRequests(AuthenticatedUser au, List files)
private boolean assignRole(RoleAssignee ra, DataFile file, DataverseRole r) {
try {
- commandEngine.submit(new AssignRoleCommand(ra, r, file, dvRequestService.getDataverseRequest()));
+ String privateUrlToken = null;
+ commandEngine.submit(new AssignRoleCommand(ra, r, file, dvRequestService.getDataverseRequest(), privateUrlToken));
JsfHelper.addSuccessMessage(r.getName() + " role assigned to " + ra.getDisplayInfo().getTitle() + " for " + file.getDisplayName() + ".");
} catch (PermissionException ex) {
JH.addMessage(FacesMessage.SEVERITY_ERROR, "The role was not able to be assigned.", "Permissions " + ex.getRequiredPermissions().toString() + " missing.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
index 14a5b20d81f..48724e1f97d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
@@ -398,7 +398,8 @@ private void notifyRoleChange(RoleAssignee ra, UserNotification.Type type) {
private void assignRole(RoleAssignee ra, DataverseRole r) {
try {
- commandEngine.submit(new AssignRoleCommand(ra, r, dvObject, dvRequestService.getDataverseRequest()));
+ String privateUrlToken = null;
+ commandEngine.submit(new AssignRoleCommand(ra, r, dvObject, dvRequestService.getDataverseRequest(), privateUrlToken));
JsfHelper.addSuccessMessage(r.getName() + " role assigned to " + ra.getDisplayInfo().getTitle() + " for " + StringEscapeUtils.escapeHtml(dvObject.getDisplayName()) + ".");
// don't notify if role = file downloader and object is not released
if (!(r.getAlias().equals(DataverseRole.FILE_DOWNLOADER) && !dvObject.isReleased()) ){
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index ee18a8a488d..82019c3db85 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -1,5 +1,6 @@
package edu.harvard.iq.dataverse;
+import edu.harvard.iq.dataverse.api.datadeposit.SwordAuth;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
@@ -8,6 +9,8 @@
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.groups.Group;
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
+import edu.harvard.iq.dataverse.authorization.groups.GroupUtil;
+import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.Command;
@@ -375,11 +378,16 @@ public RequestPermissionQuery request( DataverseRequest req ) {
* @param permission
* @return The list of dataverses {@code user} has permission {@code permission} on.
*/
- public List<Dataverse> getDataversesUserHasPermissionOn(User user, Permission permission) {
+ public List<Dataverse> getDataversesUserHasPermissionOn(AuthenticatedUser user, Permission permission) {
+ Set<Group> groups = groupService.groupsFor(user);
+ String identifiers = GroupUtil.getAllIdentifiersForUser(user, groups);
/**
- * @todo What about groups? And how can we make this more performant?
+ * @todo Are there any strings in identifiers that would break this SQL
+ * query?
*/
- Query nativeQuery = em.createNativeQuery("SELECT id FROM dvobject WHERE dtype = 'Dataverse' and id in (select definitionpoint_id from roleassignment where assigneeidentifier in ('" + user.getIdentifier() + "'));");
+ String query = "SELECT id FROM dvobject WHERE dtype = 'Dataverse' and id in (select definitionpoint_id from roleassignment where assigneeidentifier in (" + identifiers + "));";
+ logger.fine("query: " + query);
+ Query nativeQuery = em.createNativeQuery(query);
List<Integer> dataverseIdsToCheck = nativeQuery.getResultList();
List<Dataverse> dataversesUserHasPermissionOn = new LinkedList<>();
for (int dvIdAsInt : dataverseIdsToCheck) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
index d632c316dcf..76601774000 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
@@ -11,8 +11,7 @@
import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
-import edu.harvard.iq.dataverse.search.IndexServiceBean;
-import edu.harvard.iq.dataverse.search.SearchFields;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -62,7 +61,20 @@ void setup() {
predefinedRoleAssignees.put(AllUsers.get().getIdentifier(), AllUsers.get());
}
+ /**
+ * @param identifier An identifier beginning with ":" (builtin), "@"
+ * ({@link AuthenticatedUser}), "&" ({@link Group}), or "#"
+ * ({@link PrivateUrlUser}).
+ *
+ * @return A RoleAssignee (User or Group) or null.
+ *
+ * @throws IllegalArgumentException if you pass null, empty string, or an
+ * identifier that doesn't start with one of the supported characters.
+ */
public RoleAssignee getRoleAssignee(String identifier) {
+ if (identifier == null || identifier.isEmpty()) {
+ throw new IllegalArgumentException("Identifier cannot be null or empty string.");
+ }
switch (identifier.charAt(0)) {
case ':':
return predefinedRoleAssignees.get(identifier);
@@ -70,6 +82,8 @@ public RoleAssignee getRoleAssignee(String identifier) {
return authSvc.getAuthenticatedUser(identifier.substring(1));
case '&':
return groupSvc.getGroup(identifier.substring(1));
+ case '#':
+ return PrivateUrlUtil.identifier2roleAssignee(identifier);
default:
throw new IllegalArgumentException("Unsupported assignee identifier '" + identifier + "'");
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
index be3759e61d2..3d23bbb54c1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
@@ -39,6 +39,8 @@
query = "SELECT r FROM RoleAssignment r WHERE r.definitionPoint.id=:definitionPointId" ),
@NamedQuery( name = "RoleAssignment.listByRoleId",
query = "SELECT r FROM RoleAssignment r WHERE r.role=:roleId" ),
+ @NamedQuery( name = "RoleAssignment.listByPrivateUrlToken",
+ query = "SELECT r FROM RoleAssignment r WHERE r.privateUrlToken=:privateUrlToken" ),
@NamedQuery( name = "RoleAssignment.deleteByAssigneeIdentifier_RoleIdDefinition_PointId",
query = "DELETE FROM RoleAssignment r WHERE r.assigneeIdentifier=:userId AND r.role.id=:roleId AND r.definitionPoint.id=:definitionPointId"),
})
@@ -57,13 +59,17 @@ public class RoleAssignment implements java.io.Serializable {
@ManyToOne( cascade = CascadeType.MERGE )
@JoinColumn( nullable=false )
private DvObject definitionPoint;
+
+ @Column(nullable = true)
+ private String privateUrlToken;
public RoleAssignment() {}
- public RoleAssignment(DataverseRole aRole, RoleAssignee anAssignee, DvObject aDefinitionPoint) {
+ public RoleAssignment(DataverseRole aRole, RoleAssignee anAssignee, DvObject aDefinitionPoint, String privateUrlToken) {
role = aRole;
assigneeIdentifier = anAssignee.getIdentifier();
definitionPoint = aDefinitionPoint;
+ this.privateUrlToken = privateUrlToken;
}
public Long getId() {
@@ -97,7 +103,11 @@ public DvObject getDefinitionPoint() {
public void setDefinitionPoint(DvObject definitionPoint) {
this.definitionPoint = definitionPoint;
}
-
+
+ public String getPrivateUrlToken() {
+ return privateUrlToken;
+ }
+
@Override
public int hashCode() {
int hash = 7;
diff --git a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
index 99e7cded743..12d214e1ee5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
@@ -187,7 +187,8 @@ public void assignRole(ActionEvent evt) {
private void assignRole(RoleAssignee ra, DataverseRole r) {
try {
- commandEngine.submit(new AssignRoleCommand(ra, r, dvObject, dvRequestService.getDataverseRequest()));
+ String privateUrlToken = null;
+ commandEngine.submit(new AssignRoleCommand(ra, r, dvObject, dvRequestService.getDataverseRequest(), privateUrlToken));
JH.addMessage(FacesMessage.SEVERITY_INFO, "Role " + r.getName() + " assigned to " + ra.getDisplayInfo().getTitle() + " on " + StringEscapeUtils.escapeHtml(dvObject.getDisplayName()));
} catch (CommandException ex) {
JH.addMessage(FacesMessage.SEVERITY_ERROR, "Can't assign role: " + ex.getMessage());
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index 169921a5a39..2b2cb6f03fd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -20,12 +20,14 @@
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.json.JsonParser;
@@ -37,6 +39,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
+import javax.ejb.EJBException;
import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
@@ -169,6 +172,9 @@ String getWrappedMessageWhenJson() {
@EJB
protected SavedSearchServiceBean savedSearchSvc;
+ @EJB
+ protected PrivateUrlServiceBean privateUrlSvc;
+
@PersistenceContext(unitName = "VDCNet-ejbPU")
protected EntityManager em;
@@ -189,11 +195,21 @@ public JsonParser call() throws Exception {
return new JsonParser(datasetFieldSvc, metadataBlockSvc,settingsSvc);
}
});
-
- protected RoleAssignee findAssignee( String identifier ) {
- return roleAssigneeSvc.getRoleAssignee(identifier);
- }
-
+
+ protected RoleAssignee findAssignee(String identifier) {
+ try {
+ RoleAssignee roleAssignee = roleAssigneeSvc.getRoleAssignee(identifier);
+ return roleAssignee;
+ } catch (EJBException ex) {
+ Throwable cause = ex;
+ while (cause.getCause() != null) {
+ cause = cause.getCause();
+ }
+ logger.info("Exception caught looking up RoleAssignee based on identifier '" + identifier + "': " + cause.getMessage());
+ return null;
+ }
+ }
+
/**
*
* @param apiKey the key to find the user with
@@ -227,9 +243,14 @@ protected String getRequestApiKey() {
*/
protected User findUserOrDie() throws WrappedResponse {
final String requestApiKey = getRequestApiKey();
- return ( requestApiKey == null )
- ? GuestUser.get()
- : findAuthenticatedUserOrDie(requestApiKey);
+ if (requestApiKey == null) {
+ return GuestUser.get();
+ }
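+ // A Private URL token takes precedence over a regular API key: if the
+ // supplied key resolves to a Private URL, act as that PrivateUrlUser.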
+ PrivateUrlUser privateUrlUser = privateUrlSvc.getPrivateUrlUserFromToken(requestApiKey);
+ if (privateUrlUser != null) {
+ return privateUrlUser;
+ }
+ return findAuthenticatedUserOrDie(requestApiKey);
}
/**
@@ -316,6 +337,11 @@ protected T execCommand( Command cmd ) throws WrappedResponse {
throw new WrappedResponse( ex, errorResponse(Response.Status.BAD_REQUEST, ex.getMessage() ) );
} catch (PermissionException ex) {
+ /**
+ * @todo Is there any harm in exposing ex.getLocalizedMessage()?
+ * There's valuable information in there that can help people reason
+ * about permissions!
+ */
throw new WrappedResponse(errorResponse(Response.Status.UNAUTHORIZED,
"User " + cmd.getRequest().getUser().getIdentifier() + " is not permitted to perform requested action.") );
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index a10c01b889d..13246042cc8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -21,7 +21,9 @@
import edu.harvard.iq.dataverse.PermissionServiceBean;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.dataaccess.DataFileIO;
import edu.harvard.iq.dataverse.dataaccess.DataFileZipper;
import edu.harvard.iq.dataverse.dataaccess.FileAccessIO;
@@ -946,7 +948,7 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
}
}
- AuthenticatedUser user = null;
+ User user = null;
/**
* Authentication/authorization:
@@ -964,7 +966,11 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
if (session.getUser().isAuthenticated()) {
user = (AuthenticatedUser) session.getUser();
} else {
- logger.fine("User associated with the session is not an authenticated user. (Guest access will be assumed).");
+ logger.fine("User associated with the session is not an authenticated user.");
+ if (session.getUser() instanceof PrivateUrlUser) {
+ logger.fine("User associated with the session is a PrivateUrlUser user.");
+ user = session.getUser();
+ }
if (session.getUser() instanceof GuestUser) {
logger.fine("User associated with the session is indeed a guest user.");
}
@@ -976,13 +982,18 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
logger.fine("Session is null.");
}
- AuthenticatedUser apiTokenUser = null;
+ User apiTokenUser = null;
if ((apiToken != null)&&(apiToken.length()!=64)) {
// We'll also try to obtain the user information from the API token,
// if supplied:
- apiTokenUser = findUserByApiToken(apiToken);
+ try {
+ logger.fine("calling apiTokenUser = findUserOrDie()...");
+ apiTokenUser = findUserOrDie();
+ } catch (WrappedResponse wr) {
+ logger.fine("Message from findUserOrDie(): " + wr.getMessage());
+ }
if (apiTokenUser == null) {
logger.warning("API token-based auth: Unable to find a user with the API token provided.");
@@ -1001,14 +1012,14 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
if (user != null) {
// it's not unthinkable, that a null user (i.e., guest user) could be given
// the ViewUnpublished permission!
- logger.fine("Session-based auth: user "+user.getName()+" has access rights on the non-restricted, unpublished datafile.");
+ logger.fine("Session-based auth: user " + user.getIdentifier() + " has access rights on the non-restricted, unpublished datafile.");
}
return true;
}
if (apiTokenUser != null) {
if (permissionService.userOn(apiTokenUser, df.getOwner()).has(Permission.ViewUnpublishedDataset)) {
- logger.fine("Session-based auth: user "+apiTokenUser.getName()+" has access rights on the non-restricted, unpublished datafile.");
+ logger.fine("Session-based auth: user " + apiTokenUser.getIdentifier() + " has access rights on the non-restricted, unpublished datafile.");
return true;
}
}
@@ -1037,12 +1048,12 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
if (published) {
if (hasAccessToRestrictedBySession) {
if (user != null) {
- logger.fine("Session-based auth: user "+user.getName()+" is granted access to the restricted, published datafile.");
+ logger.fine("Session-based auth: user " + user.getIdentifier() + " is granted access to the restricted, published datafile.");
} else {
logger.fine("Session-based auth: guest user is granted access to the restricted, published datafile.");
}
} else {
- logger.fine("Token-based auth: user "+apiTokenUser.getName()+" is granted access to the restricted, published datafile.");
+ logger.fine("Token-based auth: user " + apiTokenUser.getIdentifier() + " is granted access to the restricted, published datafile.");
}
return true;
} else {
@@ -1055,7 +1066,7 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
if (hasAccessToRestrictedBySession) {
if (permissionService.on(df.getOwner()).has(Permission.ViewUnpublishedDataset)) {
if (user != null) {
- logger.fine("Session-based auth: user " + user.getName() + " is granted access to the restricted, unpublished datafile.");
+ logger.fine("Session-based auth: user " + user.getIdentifier() + " is granted access to the restricted, unpublished datafile.");
} else {
logger.fine("Session-based auth: guest user is granted access to the restricted, unpublished datafile.");
}
@@ -1063,7 +1074,7 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
}
} else {
if (apiTokenUser != null && permissionService.userOn(apiTokenUser, df.getOwner()).has(Permission.ViewUnpublishedDataset)) {
- logger.fine("Token-based auth: user " + apiTokenUser.getName() + " is granted access to the restricted, unpublished datafile.");
+ logger.fine("Token-based auth: user " + apiTokenUser.getIdentifier() + " is granted access to the restricted, unpublished datafile.");
}
}
}
@@ -1096,7 +1107,12 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
// Will try to obtain the user information from the API token,
// if supplied:
- user = findUserByApiToken(apiToken);
+ try {
+ logger.fine("calling user = findUserOrDie()...");
+ user = findUserOrDie();
+ } catch (WrappedResponse wr) {
+ logger.fine("Message from findUserOrDie(): " + wr.getMessage());
+ }
if (user == null) {
logger.warning("API token-based auth: Unable to find a user with the API token provided.");
@@ -1105,32 +1121,32 @@ private boolean isAccessAuthorized(DataFile df, String apiToken) {
if (permissionService.userOn(user, df).has(Permission.DownloadFile)) {
if (published) {
- logger.fine("API token-based auth: User "+user.getName()+" has rights to access the datafile.");
+ logger.fine("API token-based auth: User " + user.getIdentifier() + " has rights to access the datafile.");
return true;
} else {
// if the file is NOT published, we will let them download the
// file ONLY if they also have the permission to view
// unpublished versions:
if (permissionService.userOn(user, df.getOwner()).has(Permission.ViewUnpublishedDataset)) {
- logger.fine("API token-based auth: User "+user.getName()+" has rights to access the (unpublished) datafile.");
+ logger.fine("API token-based auth: User " + user.getIdentifier() + " has rights to access the (unpublished) datafile.");
return true;
} else {
- logger.fine("API token-based auth: User "+user.getName()+" is not authorized to access the (unpublished) datafile.");
+ logger.fine("API token-based auth: User " + user.getIdentifier() + " is not authorized to access the (unpublished) datafile.");
}
}
} else {
- logger.fine("API token-based auth: User "+user.getName()+" is not authorized to access the datafile.");
+ logger.fine("API token-based auth: User " + user.getIdentifier() + " is not authorized to access the datafile.");
}
return false;
}
if (user != null) {
- logger.fine("Session-based auth: user " + user.getName() + " has NO access rights on the requested datafile.");
+ logger.fine("Session-based auth: user " + user.getIdentifier() + " has NO access rights on the requested datafile.");
}
if (apiTokenUser != null) {
- logger.fine("Token-based auth: user " + apiTokenUser.getName() + " has NO access rights on the requested datafile.");
+ logger.fine("Token-based auth: user " + apiTokenUser.getIdentifier() + " has NO access rights on the requested datafile.");
}
if (user == null && apiTokenUser == null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 0c131871282..0026ec85a6b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -33,11 +33,14 @@
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
+import java.io.StringReader;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.Stateless;
+import javax.json.JsonObject;
+import javax.json.JsonReader;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.ws.rs.Produces;
@@ -416,7 +419,11 @@ public Response builtin2shib(String content) {
if (!knowsExistingPassword) {
String message = "User doesn't know password.";
problems.add(message);
- return errorResponse(Status.BAD_REQUEST, message);
+ /**
+ * @todo Someday we should make an errorResponse method that
+ * takes JSON arrays and objects.
+ */
+ return errorResponse(Status.BAD_REQUEST, problems.build().toString());
}
// response.add("knows existing password", knowsExistingPassword);
}
@@ -521,4 +528,20 @@ public Response validate() {
return okResponse(msg);
}
+ /**
+ * This method is used by an integration test in UsersIT.java to exercise
+ * bug https://github.com/IQSS/dataverse/issues/3287 . Not for use by users!
+ */
+ @Path("convertUserFromBcryptToSha1")
+ @POST
+ public Response convertUserFromBcryptToSha1(String json) {
+ JsonReader jsonReader = Json.createReader(new StringReader(json));
+ JsonObject object = jsonReader.readObject();
+ jsonReader.close();
+ BuiltinUser builtinUser = builtinUserService.find(new Long(object.getInt("builtinUserId")));
+ builtinUser.updateEncryptedPassword("4G7xxL9z11/JKN4jHPn4g9iIQck=", 0); // password is "sha-1Pass", 0 means SHA-1
+ BuiltinUser savedUser = builtinUserService.save(builtinUser);
+ return okResponse("foo: " + savedUser);
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index f61636862c3..f0af8490c3d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -1,45 +1,63 @@
package edu.harvard.iq.dataverse.api;
import edu.harvard.iq.dataverse.DOIEZIdServiceBean;
+import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetField;
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetFieldType;
+import edu.harvard.iq.dataverse.DatasetFieldValue;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.MetadataBlock;
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.errorResponse;
+import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
+import edu.harvard.iq.dataverse.RoleAssignment;
+import edu.harvard.iq.dataverse.api.imports.ImportException;
+import edu.harvard.iq.dataverse.api.imports.ImportUtil;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetPrivateUrlCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments;
import edu.harvard.iq.dataverse.engine.command.impl.ListVersionsCommand;
import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.SetDatasetCitationDateCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetTargetURLCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
+import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import edu.harvard.iq.dataverse.util.json.JsonParser;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringReader;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -47,6 +65,11 @@
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
+import javax.json.JsonReader;
+import javax.validation.ConstraintViolation;
+import javax.validation.Validation;
+import javax.validation.Validator;
+import javax.validation.ValidatorFactory;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
@@ -76,9 +99,19 @@ public class Datasets extends AbstractApiBean {
@EJB
DDIExportServiceBean ddiExportService;
-
+
@EJB
SystemConfig systemConfig;
+
+ @EJB
+ DatasetFieldServiceBean datasetfieldService;
+
+ @EJB
+ MetadataBlockServiceBean metadataBlockService;
+
+ @EJB
+ SettingsServiceBean settingsService;
+
/**
* Used to consolidate the way we parse and handle dataset versions.
@@ -93,22 +126,146 @@ private interface DsVersionHandler {
@GET
@Path("{id}")
- public Response getDataset( @PathParam("id") String id) {
-
+ public Response getDataset(@PathParam("id") String id) {
+
try {
final DataverseRequest r = createDataverseRequest(findUserOrDie());
-
+
Dataset retrieved = execCommand(new GetDatasetCommand(r, findDatasetOrDie(id)));
DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(r, retrieved));
final JsonObjectBuilder jsonbuilder = json(retrieved);
-
+
return okResponse(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null));
- } catch ( WrappedResponse ex ) {
- return ex.refineResponse( "GETting dataset " + id + " failed." );
- }
+ } catch (WrappedResponse ex) {
+ return ex.refineResponse("GETting dataset " + id + " failed.");
+ }
+
+ }
+
+ /* An experimental method for creating a new dataset, from scratch, all from a json metadata file
+ @POST
+ @Path("")
+ public Response createDataset(String jsonBody) {
+ Dataset importedDataset = null;
+ try {
+ final DataverseRequest r = createDataverseRequest(findUserOrDie());
+
+ StringReader rdr = new StringReader(jsonBody);
+ JsonObject json = Json.createReader(rdr).readObject();
+ JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
+ parser.setLenient(true);
+ Dataset ds = parser.parseDataset(json);
+
+
+ Dataverse owner = dataverseService.find(1L);
+ ds.setOwner(owner);
+ ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
+
+ // Check data against required contraints
+ List<ConstraintViolation> violations = ds.getVersions().get(0).validateRequired();
+ if (!violations.isEmpty()) {
+ // For migration and harvest, add NA for missing required values
+ for (ConstraintViolation v : violations) {
+ DatasetField f = ((DatasetField) v.getRootBean());
+ f.setSingleValue(DatasetField.NA_VALUE);
+ }
+ }
+
+
+ Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
+ ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
+ Validator validator = factory.getValidator();
+ if (!invalidViolations.isEmpty()) {
+ for (ConstraintViolation v : invalidViolations) {
+ DatasetFieldValue f = ((DatasetFieldValue) v.getRootBean());
+ boolean fixed = false;
+ boolean converted = false;
+ // TODO: Is this scrubbing something we want to continue doing?
+ //
+ //if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
+ // fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
+ // converted = true;
+ // if (fixed) {
+ // Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
+ // if (!scrubbedViolations.isEmpty()) {
+ // fixed = false;
+ // }
+ // }
+ //}
+ if (!fixed) {
+ String msg = "Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; "
+ + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
+ Logger.getLogger(Datasets.class.getName()).log(Level.INFO, null, msg);
+ f.setValue(DatasetField.NA_VALUE);
+ }
+ }
+ }
+
+ //ds.setHarvestedFrom(harvestingClient);
+ //ds.setHarvestIdentifier(harvestIdentifier);
+
+ importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, r, false, ImportUtil.ImportType.HARVEST));
+
+ } catch (JsonParseException ex) {
+ Logger.getLogger(Datasets.class.getName()).log(Level.INFO, null, "Error parsing datasetVersion: " + ex.getMessage());
+ return errorResponse(Response.Status.NOT_FOUND, "error parsing dataset");
+ } catch (CommandException ex) {
+ Logger.getLogger(Datasets.class.getName()).log(Level.INFO, null, "Error excuting Create dataset command: " + ex.getMessage());
+ return errorResponse(Response.Status.NOT_FOUND, "error executing create dataset command");
+ } catch (WrappedResponse ex) {
+ return ex.refineResponse("Error: "+ex.getWrappedMessageWhenJson());
+ }
+ final JsonObjectBuilder jsonbuilder = json(importedDataset);
+
+ return okResponse(jsonbuilder.add("latestVersion", json(importedDataset.getLatestVersion())));
+ } */
+
+ // TODO:
+ // This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
+ // to obtain the dataset that we are trying to export - which would handle
+ // Auth in the process... For now, Auth isn't necessary - since export ONLY
+ // WORKS on published datasets, which are open to the world. -- L.A. 4.5
+
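+ /**
+ * Exports a published dataset's metadata in the requested format.
+ * Illustrative invocation (host and persistent ID are placeholders):
+ * curl "http://localhost:8080/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/EXAMPLE"
+ */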
+ @GET
+ @Path("/export")
+ @Produces({"application/xml", "application/json"})
+ public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter) {
+
+ try {
+ Dataset dataset = datasetService.findByGlobalId(persistentId);
+ if (dataset == null) {
+ return errorResponse(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
+ }
+
+ ExportService instance = ExportService.getInstance();
+
+ String xml = instance.getExportAsString(dataset, exporter);
+ // I'm wondering if this is going to become a performance problem
+ // with really GIANT datasets,
+ // the fact that we are passing these exports, blobs of JSON, and,
+ // especially, DDI XML as complete strings. It would be nicer
+ // if we could stream instead - and the export service already can
+ // give it to us as a stream; then we could start sending the
+ // output to the remote client as soon as we got the first bytes,
+ // without waiting for the whole thing to be generated and buffered...
+ // (the way Access API streams its output).
+ // -- L.A., 4.5
+
+ LOGGER.fine("xml to return: " + xml);
+ String mediaType = MediaType.TEXT_PLAIN;
+ if (instance.isXMLFormat(exporter)){
+ mediaType = MediaType.APPLICATION_XML;
+ }
+ return Response.ok()
+ .entity(xml)
+ .type(mediaType).
+ build();
+ } catch (Exception wr) {
+ return errorResponse(Response.Status.FORBIDDEN, "Export Failed");
+ }
}
-
+
@DELETE
@Path("{id}")
public Response deleteDataset( @PathParam("id") String id) {
@@ -494,6 +651,7 @@ Dataset findDatasetOrDie( String id ) throws WrappedResponse {
@GET
@Path("ddi")
@Produces({"application/xml", "application/json"})
+ @Deprecated
public Response getDdi(@QueryParam("id") long id, @QueryParam("persistentId") String persistentId, @QueryParam("dto") boolean dto) {
boolean ddiExportEnabled = systemConfig.isDdiExportEnabled();
if (!ddiExportEnabled) {
@@ -518,7 +676,7 @@ public Response getDdi(@QueryParam("id") long id, @QueryParam("persistentId") St
* to getLatestVersion
*/
final JsonObjectBuilder datasetAsJson = jsonAsDatasetDto(dataset.getLatestVersion());
- xml = DdiExportUtil.datasetDtoAsJson2ddi(datasetAsJson.build().toString());
+ xml = DdiExportUtil.datasetDtoAsJson2ddi(datasetAsJson.toString());
} else {
OutputStream outputStream = new ByteArrayOutputStream();
ddiExportService.exportDataset(dataset.getId(), outputStream, null, null);
@@ -534,7 +692,7 @@ public Response getDdi(@QueryParam("id") long id, @QueryParam("persistentId") St
return wr.getResponse();
}
}
-
+
/**
* @todo Make this real. Currently only used for API testing. Copied from
* the equivalent API endpoint for dataverses and simplified with values
@@ -554,12 +712,71 @@ public Response createAssignment(String userOrGroup, @PathParam("identifier") St
return errorResponse(Response.Status.BAD_REQUEST, "Assignee not found");
}
DataverseRole theRole = rolesSvc.findBuiltinRoleByAlias("admin");
+ String privateUrlToken = null;
return okResponse(
- json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(findUserOrDie())))));
+ json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(findUserOrDie()), privateUrlToken))));
} catch (WrappedResponse ex) {
LOGGER.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage());
return ex.getResponse();
}
}
+ @GET
+ @Path("{identifier}/assignments")
+ public Response getAssignments(@PathParam("identifier") String id) {
+ try {
+ JsonArrayBuilder jab = Json.createArrayBuilder();
+ for (RoleAssignment ra : execCommand(new ListRoleAssignments(createDataverseRequest(findUserOrDie()), findDatasetOrDie(id)))) {
+ jab.add(json(ra));
+ }
+ return okResponse(jab);
+ } catch (WrappedResponse ex) {
+ LOGGER.log(Level.WARNING, "Can''t list assignments: {0}", ex.getMessage());
+ return ex.getResponse();
+ }
+ }
+
+ @GET
+ @Path("{id}/privateUrl")
+ public Response getPrivateUrlData(@PathParam("id") String idSupplied) {
+ try {
+ PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied)));
+ if (privateUrl != null) {
+ return okResponse(json(privateUrl));
+ } else {
+ return errorResponse(Response.Status.NOT_FOUND, "Private URL not found.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ }
+
+ @POST
+ @Path("{id}/privateUrl")
+ public Response createPrivateUrl(@PathParam("id") String idSupplied) {
+ try {
+ return okResponse(json(execCommand(new CreatePrivateUrlCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied)))));
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ }
+
+ @DELETE
+ @Path("{id}/privateUrl")
+ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
+ try {
+ User user = findUserOrDie();
+ Dataset dataset = findDatasetOrDie(idSupplied);
+ PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(createDataverseRequest(user), dataset));
+ if (privateUrl != null) {
+ execCommand(new DeletePrivateUrlCommand(createDataverseRequest(user), dataset));
+ return okResponse("Private URL deleted.");
+ } else {
+ return errorResponse(Response.Status.NOT_FOUND, "No Private URL to delete.");
+ }
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+ }
+
}
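The buffering concern raised in the export comment above (building the whole export as one String before responding) could be addressed with JAX-RS streaming. The following is only a rough sketch, not part of this change set: it assumes ExportService exposes some method that writes an export directly to an OutputStream (the name getExport(dataset, exporter, outputStream) is invented here for illustration), and it would also need a javax.ws.rs.core.StreamingOutput import.

    // Hypothetical streaming variant of the export endpoint -- a sketch only, not part of this PR.
    @GET
    @Path("/export/stream")
    public Response exportDatasetStreamed(@QueryParam("persistentId") String persistentId,
                                          @QueryParam("exporter") String exporter) {
        Dataset dataset = datasetService.findByGlobalId(persistentId);
        if (dataset == null) {
            return errorResponse(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
        }
        ExportService instance = ExportService.getInstance();
        String mediaType = instance.isXMLFormat(exporter) ? MediaType.APPLICATION_XML : MediaType.TEXT_PLAIN;
        StreamingOutput stream = outputStream -> {
            // Bytes reach the client as they are produced, instead of being
            // buffered into one large String first.
            instance.getExport(dataset, exporter, outputStream); // method name assumed
        };
        return Response.ok(stream).type(mediaType).build();
    }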
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index a787f3e26ee..2c0322e719c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -311,7 +311,7 @@ public Response setMetadataRoot( @PathParam("identifier")String dvIdtf, String b
try {
Dataverse dataverse = findDataverseOrDie(dvIdtf);
execute(new UpdateDataverseMetadataBlocksCommand.SetRoot(createDataverseRequest(findUserOrDie()), dataverse, root));
- return okResponseWithValue("Dataverse " + dataverse.getName() + " is now a metadata root");
+ return okResponseWithValue("Dataverse " + dataverse.getName() + " is now a metadata " + (root? "" : "non-") + "root");
} catch (WrappedResponse wr) {
return wr.getResponse();
}
@@ -462,10 +462,11 @@ public Response createAssignment( RoleAssignmentDTO ra, @PathParam("identifier")
if ( theRole == null ) {
return errorResponse( Status.BAD_REQUEST, "Can't find role named '" + ra.getRole() + "' in dataverse " + dataverse);
}
+ String privateUrlToken = null;
return okResponse(
json(
- execCommand( new AssignRoleCommand(assignee, theRole, dataverse, createDataverseRequest(findUserOrDie())))));
+ execCommand(new AssignRoleCommand(assignee, theRole, dataverse, createDataverseRequest(findUserOrDie()), privateUrlToken))));
} catch (WrappedResponse ex) {
LOGGER.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage());
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
new file mode 100644
index 00000000000..8df72e4f9ca
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
@@ -0,0 +1,315 @@
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.harvest.client.ClientHarvestRun;
+import edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
+import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import javax.json.JsonObjectBuilder;
+import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.ejb.Stateless;
+import javax.json.Json;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonObject;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Response;
+
+@Stateless
+@Path("harvest/clients")
+public class HarvestingClients extends AbstractApiBean {
+
+
+ @EJB
+ DataverseServiceBean dataverseService;
+ @EJB
+ HarvesterServiceBean harvesterService;
+ @EJB
+ HarvestingClientServiceBean harvestingClientService;
+
+ private static final Logger logger = Logger.getLogger(HarvestingClients.class.getName());
+ /*
+ * /api/harvest/clients
+ * and
+ * /api/harvest/clients/{nickname}
+ * will, by default, return a JSON record with the information about the
+ * configured remote archives.
+ * Optionally, plain text output may be provided as well.
+ */
+ @GET
+ @Path("")
+ public Response harvestingClients(@QueryParam("key") String apiKey) throws IOException {
+
+ List<HarvestingClient> harvestingClients = null;
+ try {
+ harvestingClients = harvestingClientService.getAllHarvestingClients();
+ } catch (Exception ex) {
+ return errorResponse( Response.Status.INTERNAL_SERVER_ERROR, "Caught an exception looking up configured harvesting clients; " + ex.getMessage() );
+ }
+
+ if (harvestingClients == null) {
+ // returning an empty list:
+ return okResponse(jsonObjectBuilder().add("harvestingClients",""));
+ }
+
+ JsonArrayBuilder hcArr = Json.createArrayBuilder();
+
+ for (HarvestingClient harvestingClient : harvestingClients) {
+ // We already have this harvestingClient - why do we need to
+ // execute this GetHarvestingClientCommand in order to get it
+ // again? - the purpose of the command is to run the request through
+ // the Authorization system, to verify that they actually have
+ // the permission to view this harvesting client config. -- L.A. 4.4
+ HarvestingClient retrievedHarvestingClient = null;
+ try {
+ DataverseRequest req = createDataverseRequest(findUserOrDie());
+ retrievedHarvestingClient = execCommand( new GetHarvestingClientCommand(req, harvestingClient));
+ } catch (Exception ex) {
+ // Don't do anything.
+ // We'll just skip this one - since this means the user isn't
+ // authorized to view this client configuration.
+ }
+
+ if (retrievedHarvestingClient != null) {
+ hcArr.add(harvestingConfigAsJson(retrievedHarvestingClient));
+ }
+ }
+
+ return okResponse(jsonObjectBuilder().add("harvestingClients", hcArr));
+ }
+
+ @GET
+ @Path("{nickName}")
+ public Response harvestingClient(@PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException {
+
+ HarvestingClient harvestingClient = null;
+ try {
+ harvestingClient = harvestingClientService.findByNickname(nickName);
+ } catch (Exception ex) {
+ logger.warning("Exception caught looking up harvesting client " + nickName + ": " + ex.getMessage());
+ return errorResponse( Response.Status.BAD_REQUEST, "Internal error: failed to look up harvesting client " + nickName + ".");
+ }
+
+ if (harvestingClient == null) {
+ return errorResponse(Response.Status.NOT_FOUND, "Harvesting client " + nickName + " not found.");
+ }
+
+ HarvestingClient retrievedHarvestingClient = null;
+
+ try {
+ // findUserOrDie() and execCommand() both throw WrappedResponse
+ // exception, that already has a proper HTTP response in it.
+
+ retrievedHarvestingClient = execCommand(new GetHarvestingClientCommand(createDataverseRequest(findUserOrDie()), harvestingClient));
+ logger.info("retrieved Harvesting Client " + retrievedHarvestingClient.getName() + " with the GetHarvestingClient command.");
+ } catch (WrappedResponse wr) {
+ return wr.getResponse();
+ } catch (Exception ex) {
+ logger.warning("Unknown exception caught while executing GetHarvestingClientCommand: "+ex.getMessage());
+ retrievedHarvestingClient = null;
+ }
+
+ if (retrievedHarvestingClient == null) {
+ return errorResponse( Response.Status.BAD_REQUEST,
+ "Internal error: failed to retrieve harvesting client " + nickName + ".");
+ }
+
+ try {
+ return okResponse(harvestingConfigAsJson(retrievedHarvestingClient));
+ } catch (Exception ex) {
+ logger.warning("Unknown exception caught while trying to format harvesting client config as json: "+ex.getMessage());
+ return errorResponse( Response.Status.BAD_REQUEST,
+ "Internal error: failed to produce output for harvesting client " + nickName + ".");
+ }
+ }
+
+ @POST
+ @Path("{nickName}")
+ public Response createHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+
+ try ( StringReader rdr = new StringReader(jsonBody) ) {
+ JsonObject json = Json.createReader(rdr).readObject();
+
+ HarvestingClient harvestingClient = new HarvestingClient();
+ // TODO: check that it doesn't exist yet...
+ harvestingClient.setName(nickName);
+ String dataverseAlias = jsonParser().parseHarvestingClient(json, harvestingClient);
+ Dataverse ownerDataverse = dataverseService.findByAlias(dataverseAlias);
+
+ if (ownerDataverse == null) {
+ return errorResponse(Response.Status.BAD_REQUEST, "No such dataverse: " + dataverseAlias);
+ }
+
+ harvestingClient.setDataverse(ownerDataverse);
+ if (ownerDataverse.getHarvestingClientConfigs() == null) {
+ ownerDataverse.setHarvestingClientConfigs(new ArrayList<>());
+ }
+ ownerDataverse.getHarvestingClientConfigs().add(harvestingClient);
+
+ DataverseRequest req = createDataverseRequest(findUserOrDie());
+ HarvestingClient managedHarvestingClient = execCommand( new CreateHarvestingClientCommand(req, harvestingClient));
+ return createdResponse( "/harvest/clients/" + nickName, harvestingConfigAsJson(managedHarvestingClient));
+
+ } catch (JsonParseException ex) {
+ return errorResponse( Response.Status.BAD_REQUEST, "Error parsing harvesting client: " + ex.getMessage() );
+
+ } catch (WrappedResponse ex) {
+ return ex.getResponse();
+
+ }
+
+ }
+
+ @PUT
+ @Path("{nickName}")
+ public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+ HarvestingClient harvestingClient = null;
+ try {
+ harvestingClient = harvestingClientService.findByNickname(nickName);
+ } catch (Exception ex) {
+ // We don't care what happened; we'll just assume we couldn't find it.
+ harvestingClient = null;
+ }
+
+ if (harvestingClient == null) {
+ return errorResponse( Response.Status.NOT_FOUND, "Harvesting client " + nickName + " not found.");
+ }
+
+ String ownerDataverseAlias = harvestingClient.getDataverse().getAlias();
+
+ try ( StringReader rdr = new StringReader(jsonBody) ) {
+ DataverseRequest req = createDataverseRequest(findUserOrDie());
+ JsonObject json = Json.createReader(rdr).readObject();
+
+ String newDataverseAlias = jsonParser().parseHarvestingClient(json, harvestingClient);
+
+ if (newDataverseAlias != null
+ && !newDataverseAlias.equals("")
+ && !newDataverseAlias.equals(ownerDataverseAlias)) {
+ return errorResponse(Response.Status.BAD_REQUEST, "Bad \"dataverseAlias\" supplied. Harvesting client "+nickName+" belongs to the dataverse "+ownerDataverseAlias);
+ }
+ HarvestingClient managedHarvestingClient = execCommand( new UpdateHarvestingClientCommand(req, harvestingClient));
+ return createdResponse( "/harvest/clients/" + nickName, harvestingConfigAsJson(managedHarvestingClient));
+
+ } catch (JsonParseException ex) {
+ return errorResponse( Response.Status.BAD_REQUEST, "Error parsing harvesting client: " + ex.getMessage() );
+
+ } catch (WrappedResponse ex) {
+ return ex.getResponse();
+
+ }
+
+ }
+
+ // TODO:
+ // add a @DELETE method
+ // (there is already a DeleteHarvestingClient command)
+
+ // Methods for managing harvesting runs (jobs):
+
+
+ // This POST starts a new harvesting run:
+ @POST
+ @Path("{nickName}/run")
+ public Response startHarvestingJob(@PathParam("nickName") String clientNickname, @QueryParam("key") String apiKey) throws IOException {
+
+ try {
+ AuthenticatedUser authenticatedUser = null;
+
+ try {
+ authenticatedUser = findAuthenticatedUserOrDie();
+ } catch (WrappedResponse wr) {
+ return errorResponse(Response.Status.UNAUTHORIZED, "Authentication required to use this API method");
+ }
+
+ if (authenticatedUser == null || !authenticatedUser.isSuperuser()) {
+ return errorResponse(Response.Status.FORBIDDEN, "Only the Dataverse Admin user can run harvesting jobs");
+ }
+
+ HarvestingClient harvestingClient = harvestingClientService.findByNickname(clientNickname);
+
+ if (harvestingClient == null) {
+ return errorResponse(Response.Status.NOT_FOUND, "No such harvesting client: " + clientNickname);
+ }
+
+ DataverseRequest dataverseRequest = createDataverseRequest(authenticatedUser);
+ harvesterService.doAsyncHarvest(dataverseRequest, harvestingClient);
+
+ } catch (Exception e) {
+ return this.errorResponse(Response.Status.BAD_REQUEST, "Exception thrown when running harvesting client \"" + clientNickname + "\" via REST API; " + e.getMessage());
+ }
+ return this.accepted();
+ }
+
+ // This GET shows the status of the harvesting run in progress for this
+ // client, if present:
+ // @GET
+ // @Path("{nickName}/run")
+ // TODO:
+
+ // This DELETE kills the harvesting run in progress for this client,
+ // if present:
+ // @DELETE
+ // @Path("{nickName}/run")
+ // TODO:
+
+
+
+
+
+ /* Auxiliary, helper methods: */
+
+ /*
+ @Deprecated
+ public static JsonArrayBuilder harvestingConfigsAsJsonArray(List<Dataverse> harvestingDataverses) {
+ JsonArrayBuilder hdArr = Json.createArrayBuilder();
+
+ for (Dataverse hd : harvestingDataverses) {
+ hdArr.add(harvestingConfigAsJson(hd.getHarvestingClientConfig()));
+ }
+ return hdArr;
+ }*/
+
+ public static JsonObjectBuilder harvestingConfigAsJson(HarvestingClient harvestingConfig) {
+ if (harvestingConfig == null) {
+ return null;
+ }
+
+
+ return jsonObjectBuilder().add("nickName", harvestingConfig.getName()).
+ add("dataverseAlias", harvestingConfig.getDataverse().getAlias()).
+ add("type", harvestingConfig.getHarvestType()).
+ add("harvestUrl", harvestingConfig.getHarvestingUrl()).
+ add("archiveUrl", harvestingConfig.getArchiveUrl()).
+ add("archiveDescription",harvestingConfig.getArchiveDescription()).
+ add("metadataFormat", harvestingConfig.getMetadataPrefix()).
+ add("set", harvestingConfig.getHarvestingSet() == null ? "N/A" : harvestingConfig.getHarvestingSet()).
+ add("schedule", harvestingConfig.isScheduled() ? harvestingConfig.getScheduleDescription() : "none").
+ add("status", harvestingConfig.isHarvestingNow() ? "inProgress" : "inActive").
+ add("lastHarvest", harvestingConfig.getLastHarvestTime() == null ? "N/A" : harvestingConfig.getLastHarvestTime().toString()).
+ add("lastResult", harvestingConfig.getLastResult()).
+ add("lastSuccessful", harvestingConfig.getLastSuccessfulHarvestTime() == null ? "N/A" : harvestingConfig.getLastSuccessfulHarvestTime().toString()).
+ add("lastNonEmpty", harvestingConfig.getLastNonEmptyHarvestTime() == null ? "N/A" : harvestingConfig.getLastNonEmptyHarvestTime().toString()).
+ add("lastDatasetsHarvested", harvestingConfig.getLastHarvestedDatasetCount() == null ? "N/A" : harvestingConfig.getLastHarvestedDatasetCount().toString()).
+ add("lastDatasetsDeleted", harvestingConfig.getLastDeletedDatasetCount() == null ? "N/A" : harvestingConfig.getLastDeletedDatasetCount().toString()).
+ add("lastDatasetsFailed", harvestingConfig.getLastFailedDatasetCount() == null ? "N/A" : harvestingConfig.getLastFailedDatasetCount().toString());
+ }
+}
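For the @DELETE TODO above: since a DeleteHarvestingClient command already exists, the missing endpoint could plausibly look like the sketch below. This is an illustration only; the exact constructor of DeleteHarvestingClientCommand is assumed, and a javax.ws.rs.DELETE import (plus the command import) would be needed.

    // Hypothetical sketch of the missing DELETE endpoint -- not part of this PR.
    @DELETE
    @Path("{nickName}")
    public Response deleteHarvestingClient(@PathParam("nickName") String nickName) {
        HarvestingClient harvestingClient = harvestingClientService.findByNickname(nickName);
        if (harvestingClient == null) {
            return errorResponse(Response.Status.NOT_FOUND, "Harvesting client " + nickName + " not found.");
        }
        try {
            DataverseRequest req = createDataverseRequest(findUserOrDie());
            execCommand(new DeleteHarvestingClientCommand(req, harvestingClient)); // constructor assumed
            return okResponse("Harvesting client " + nickName + " deleted.");
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }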
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
new file mode 100644
index 00000000000..f54190597b2
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
@@ -0,0 +1,204 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetHarvestingClientCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand;
+import edu.harvard.iq.dataverse.harvest.client.ClientHarvestRun;
+import edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
+import edu.harvard.iq.dataverse.harvest.server.OAISet;
+import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
+import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import javax.json.JsonObjectBuilder;
+import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.List;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.ejb.Stateless;
+import javax.json.Json;
+import javax.json.JsonArrayBuilder;
+import javax.json.JsonObject;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Response;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@Stateless
+@Path("harvest/server/oaisets")
+public class HarvestingServer extends AbstractApiBean {
+ @EJB
+ OAISetServiceBean oaiSetService;
+
+ private static final Logger logger = Logger.getLogger(HarvestingServer.class.getName());
+
+ // TODO: this should be available to admin only.
+
+ @GET
+ @Path("")
+ public Response oaiSets(@QueryParam("key") String apiKey) throws IOException {
+
+
+ List<OAISet> oaiSets = null;
+ try {
+ oaiSets = oaiSetService.findAll();
+ } catch (Exception ex) {
+ return errorResponse(Response.Status.INTERNAL_SERVER_ERROR, "Caught an exception looking up available OAI sets; " + ex.getMessage());
+ }
+
+ if (oaiSets == null) {
+ // returning an empty list:
+ return okResponse(jsonObjectBuilder().add("oaisets", ""));
+ }
+
+ JsonArrayBuilder hcArr = Json.createArrayBuilder();
+
+ for (OAISet set : oaiSets) {
+ hcArr.add(oaiSetAsJson(set));
+ }
+
+ return okResponse(jsonObjectBuilder().add("oaisets", hcArr));
+ }
+
+ @GET
+ @Path("{specname}")
+ public Response oaiSet(@PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException {
+
+ OAISet set = null;
+ try {
+ set = oaiSetService.findBySpec(spec);
+ } catch (Exception ex) {
+ logger.warning("Exception caught looking up OAI set " + spec + ": " + ex.getMessage());
+ return errorResponse( Response.Status.BAD_REQUEST, "Internal error: failed to look up OAI set " + spec + ".");
+ }
+
+ if (set == null) {
+ return errorResponse(Response.Status.NOT_FOUND, "OAI set " + spec + " not found.");
+ }
+
+ try {
+ return okResponse(oaiSetAsJson(set));
+ } catch (Exception ex) {
+ logger.warning("Unknown exception caught while trying to format OAI set " + spec + " as json: "+ex.getMessage());
+ return errorResponse( Response.Status.BAD_REQUEST,
+ "Internal error: failed to produce output for OAI set " + spec + ".");
+ }
+ }
+
+ @POST
+ @Path("{specname}")
+ public Response createOaiSet(String jsonBody, @PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+
+ //try () {
+ StringReader rdr = new StringReader(jsonBody);
+ JsonObject json = Json.createReader(rdr).readObject();
+
+ OAISet set = new OAISet();
+ // TODO: check that it doesn't exist yet...
+ set.setSpec(spec);
+ // TODO: jsonParser().parseOaiSet(json, set);
+
+ oaiSetService.save(set);
+
+ return createdResponse( "/harvest/server/oaisets/" + spec, oaiSetAsJson(set));
+
+ //} catch (JsonParseException ex) {
+ // return errorResponse( Response.Status.BAD_REQUEST, "Error parsing OAI set: " + ex.getMessage() );
+
+ //} catch (WrappedResponse ex) {
+ // return ex.getResponse();
+ //}
+ }
+
+ @PUT
+ @Path("{specname}")
+ public Response modifyOaiSet(String jsonBody, @PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+ // TODO:
+ // ...
+ return createdResponse("/harvest/server/oaisets/" + spec, null);
+ }
+
+ @DELETE
+ @Path("{specname}")
+ public Response deleteOaiSet(@PathParam("specname") String spec, @QueryParam("key") String apiKey) {
+ OAISet set = null;
+ try {
+ set = oaiSetService.findBySpec(spec);
+ } catch (Exception ex) {
+ logger.warning("Exception caught looking up OAI set " + spec + ": " + ex.getMessage());
+ return errorResponse( Response.Status.BAD_REQUEST, "Internal error: failed to look up OAI set " + spec + ".");
+ }
+
+ if (set == null) {
+ return errorResponse(Response.Status.NOT_FOUND, "OAI set " + spec + " not found.");
+ }
+
+ try {
+ oaiSetService.setDeleteInProgress(set.getId());
+ oaiSetService.remove(set.getId());
+ } catch (Exception ex) {
+ return errorResponse( Response.Status.BAD_REQUEST, "Internal error: failed to delete OAI set " + spec + "; " + ex.getMessage());
+ }
+
+ return okResponse("OAI Set " + spec + " deleted");
+
+ }
+
+ @GET
+ @Path("{specname}/datasets")
+ public Response oaiSetListDatasets(@PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException {
+ OAISet set = null;
+ try {
+ set = oaiSetService.findBySpec(spec);
+ } catch (Exception ex) {
+ logger.warning("Exception caught looking up OAI set " + spec + ": " + ex.getMessage());
+ return errorResponse( Response.Status.BAD_REQUEST, "Internal error: failed to look up OAI set " + spec + ".");
+ }
+
+ if (set == null) {
+ return errorResponse(Response.Status.NOT_FOUND, "OAI set " + spec + " not found.");
+ }
+
+ // TODO: list the datasets in this set; for now, returning an empty response:
+ return okResponse("");
+
+ }
+
+ /* Auxiliary, helper methods: */
+ public static JsonArrayBuilder oaiSetsAsJsonArray(List<OAISet> oaiSets) {
+ JsonArrayBuilder hdArr = Json.createArrayBuilder();
+
+ for (OAISet set : oaiSets) {
+ hdArr.add(oaiSetAsJson(set));
+ }
+ return hdArr;
+ }
+
+ public static JsonObjectBuilder oaiSetAsJson(OAISet set) {
+ if (set == null) {
+ return null;
+ }
+
+ return jsonObjectBuilder().add("name", set.getName()).
+ add("spec", set.getSpec()).
+ add("description", set.getDescription()).
+ add("definition", set.getDefinition()).
+ add("version", set.getVersion());
+ }
+
+}
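The createOaiSet() method above leaves the body parsing as a TODO (jsonParser().parseOaiSet(json, set)). A minimal sketch of what that step could do is shown here; it assumes the request body uses the same keys that oaiSetAsJson() emits and that OAISet has matching setters, neither of which is shown in this diff.

    // Hypothetical helper for the parsing TODO in createOaiSet() -- not part of this PR.
    private void populateOaiSetFromJson(JsonObject json, OAISet set) {
        // Keys mirror the ones produced by oaiSetAsJson(); the setters are assumed to exist.
        set.setName(json.getString("name", null));
        set.setDescription(json.getString("description", null));
        set.setDefinition(json.getString("definition", null));
    }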
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
new file mode 100644
index 00000000000..24122b7c28d
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
@@ -0,0 +1,26 @@
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import javax.ejb.EJB;
+import javax.json.Json;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.Response;
+
+@Path("info")
+public class Info extends AbstractApiBean {
+
+ @EJB
+ SettingsServiceBean settingsService;
+
+ @GET
+ @Path("settings/:DatasetPublishPopupCustomText")
+ public Response getDatasetPublishPopupCustomText() {
+ String setting = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText);
+ if (setting != null) {
+ return okResponse(Json.createObjectBuilder().add("message", setting));
+ } else {
+ return notFound("Setting " + SettingsServiceBean.Key.DatasetPublishPopupCustomText + " not found");
+ }
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
new file mode 100644
index 00000000000..b77954bf1a5
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
@@ -0,0 +1,54 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.DatasetServiceBean;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+/**
+ *
+ * @author Leonid Andreev
+ *
+ */
+
+@Path("admin/metadata")
+public class Metadata extends AbstractApiBean {
+ private static final Logger logger = Logger.getLogger(Metadata.class.getName());
+
+ @EJB
+ DatasetServiceBean datasetService;
+
+ // The following 2 commands start "export all" jobs in the background,
+ // asynchronously.
+ // (These API calls should probably not be here;
+ // maybe under "/admin" somewhere?)
+ // exportAll will attempt to go through all the published, local
+ // datasets *that haven't been exported yet* - which is determined by
+ // checking the lastexporttime value of the dataset: if it's null, or earlier
+ // than the last publication date, the dataset counts as "unexported" - and export them.
+ @GET
+ @Path("/exportAll")
+ @Produces("application/json")
+ public Response exportAll() {
+ datasetService.exportAllAsync();
+ return this.accepted();
+ }
+
+ // reExportAll will FORCE A FULL REEXPORT on every published, local
+ // dataset, regardless of the lastexporttime value.
+ @GET
+ @Path("/reExportAll")
+ @Produces("application/json")
+ public Response reExportAll() {
+ datasetService.reExportAllAsync();
+ return this.accepted();
+ }
+}
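To make the "haven't been exported yet" rule described in the comment concrete, the per-dataset check behind exportAllAsync() amounts to something like the sketch below. The getter names are assumed here for illustration; the rule itself is the one stated in the comment (lastexporttime is null, or older than the last publication date).

    // Hypothetical restatement of the "needs export" rule -- not part of this PR.
    private static boolean needsExport(Dataset dataset) {
        java.util.Date lastExport = dataset.getLastExportTime();        // getter name assumed
        java.util.Date publicationDate = dataset.getPublicationDate();  // getter name assumed
        if (publicationDate == null) {
            return false; // never published, so nothing to export
        }
        return lastExport == null || lastExport.before(publicationDate);
    }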
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
index 166780baf79..47006561594 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
@@ -7,6 +7,7 @@
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
@@ -40,6 +41,8 @@ public class CollectionDepositManagerImpl implements CollectionDepositManager {
DataverseServiceBean dataverseService;
@EJB
DatasetServiceBean datasetService;
+ @EJB
+ PermissionServiceBean permissionService;
@Inject
SwordAuth swordAuth;
@Inject
@@ -54,16 +57,16 @@ public class CollectionDepositManagerImpl implements CollectionDepositManager {
SwordServiceBean swordService;
@EJB
SettingsServiceBean settingsService;
-
+
private HttpServletRequest request;
-
+
@Override
public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration config)
throws SwordError, SwordServerException, SwordAuthException {
AuthenticatedUser user = swordAuth.auth(authCredentials);
- DataverseRequest dvReq = new DataverseRequest( user, request);
-
+ DataverseRequest dvReq = new DataverseRequest(user, request);
+
urlManager.processUrl(collectionUri);
String dvAlias = urlManager.getTargetIdentifier();
if (urlManager.getTargetType().equals("dataverse") && dvAlias != null) {
@@ -74,112 +77,111 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede
if (dvThatWillOwnDataset != null) {
- if (swordAuth.hasAccessToModifyDataverse(dvReq, dvThatWillOwnDataset)) {
-
- logger.log(Level.FINE, "multipart: {0}", deposit.isMultipart());
- logger.log(Level.FINE, "binary only: {0}", deposit.isBinaryOnly());
- logger.log(Level.FINE, "entry only: {0}", deposit.isEntryOnly());
- logger.log(Level.FINE, "in progress: {0}", deposit.isInProgress());
- logger.log(Level.FINE, "metadata relevant: {0}", deposit.isMetadataRelevant());
-
- if (deposit.isEntryOnly()) {
- // do a sanity check on the XML received
- try {
- SwordEntry swordEntry = deposit.getSwordEntry();
- logger.log(Level.FINE, "deposit XML received by createNew():\n{0}", swordEntry.toString());
- } catch (ParseException ex) {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can not create dataset due to malformed Atom entry: " + ex);
- }
+ logger.log(Level.FINE, "multipart: {0}", deposit.isMultipart());
+ logger.log(Level.FINE, "binary only: {0}", deposit.isBinaryOnly());
+ logger.log(Level.FINE, "entry only: {0}", deposit.isEntryOnly());
+ logger.log(Level.FINE, "in progress: {0}", deposit.isInProgress());
+ logger.log(Level.FINE, "metadata relevant: {0}", deposit.isMetadataRelevant());
- Dataset dataset = new Dataset();
- dataset.setOwner(dvThatWillOwnDataset);
- String nonNullDefaultIfKeyNotFound = "";
- String protocol = settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
- String authority = settingsService.getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
- String separator = settingsService.getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound);
- dataset.setProtocol(protocol);
- dataset.setAuthority(authority);
- dataset.setDoiSeparator(separator);
- dataset.setIdentifier(datasetService.generateIdentifierSequence(protocol, authority, separator));
- logger.log(Level.FINE, "DS Deposit identifier: {0}", dataset.getIdentifier());
- DatasetVersion newDatasetVersion = dataset.getEditVersion();
-
- String foreignFormat = SwordUtil.DCTERMS;
- try {
-
- importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, newDatasetVersion);
- } catch (Exception ex) {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
- }
+ if (deposit.isEntryOnly()) {
+ // do a sanity check on the XML received
+ try {
+ SwordEntry swordEntry = deposit.getSwordEntry();
+ logger.log(Level.FINE, "deposit XML received by createNew():\n{0}", swordEntry.toString());
+ } catch (ParseException ex) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can not create dataset due to malformed Atom entry: " + ex);
+ }
+
+ Dataset dataset = new Dataset();
+ dataset.setOwner(dvThatWillOwnDataset);
+ String nonNullDefaultIfKeyNotFound = "";
+ String protocol = settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
+ String authority = settingsService.getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
+ String separator = settingsService.getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound);
+ dataset.setProtocol(protocol);
+ dataset.setAuthority(authority);
+ dataset.setDoiSeparator(separator);
+ dataset.setIdentifier(datasetService.generateIdentifierSequence(protocol, authority, separator));
+ logger.log(Level.FINE, "DS Deposit identifier: {0}", dataset.getIdentifier());
+
+ CreateDatasetCommand createDatasetCommand = new CreateDatasetCommand(dataset, dvReq, false);
+ if (!permissionService.isUserAllowedOn(user, createDatasetCommand, dataset)) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to create a dataset in this dataverse.");
+ }
+
+ DatasetVersion newDatasetVersion = dataset.getEditVersion();
+
+ String foreignFormat = SwordUtil.DCTERMS;
+ try {
+
+ importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, newDatasetVersion);
+ } catch (Exception ex) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
+ }
- swordService.addDatasetContact(newDatasetVersion, user);
- swordService.addDatasetDepositor(newDatasetVersion, user);
- swordService.addDatasetSubjectIfMissing(newDatasetVersion);
- swordService.setDatasetLicenseAndTermsOfUse(newDatasetVersion, deposit.getSwordEntry());
-
- Dataset createdDataset = null;
- try {
- createdDataset = engineSvc.submit(new CreateDatasetCommand(dataset, dvReq, false));
- } catch (EJBException | CommandException ex) {
- Throwable cause = ex;
- StringBuilder sb = new StringBuilder();
- sb.append(ex.getLocalizedMessage());
- while (cause.getCause() != null) {
- cause = cause.getCause();
- /**
- * @todo move this ConstraintViolationException
- * check to CreateDatasetCommand. Can be
- * triggered if you don't call
- * dataset.setIdentifier() or if you feed it
- * date format we don't like. Once this is done
- * we should be able to drop EJBException from
- * the catch above and only catch
- * CommandException
- *
- * See also Have commands catch
- * ConstraintViolationException and turn them
- * into something that inherits from
- * CommandException · Issue #1009 ·
- * IQSS/dataverse -
- * https://github.com/IQSS/dataverse/issues/1009
- */
- if (cause instanceof ConstraintViolationException) {
- ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
- for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
- sb.append(" Invalid value: '").append(violation.getInvalidValue()).append("' for ")
- .append(violation.getPropertyPath()).append(" at ")
- .append(violation.getLeafBean()).append(" - ")
- .append(violation.getMessage());
- }
+ swordService.addDatasetContact(newDatasetVersion, user);
+ swordService.addDatasetDepositor(newDatasetVersion, user);
+ swordService.addDatasetSubjectIfMissing(newDatasetVersion);
+ swordService.setDatasetLicenseAndTermsOfUse(newDatasetVersion, deposit.getSwordEntry());
+
+ Dataset createdDataset = null;
+ try {
+ createdDataset = engineSvc.submit(createDatasetCommand);
+ } catch (EJBException | CommandException ex) {
+ Throwable cause = ex;
+ StringBuilder sb = new StringBuilder();
+ sb.append(ex.getLocalizedMessage());
+ while (cause.getCause() != null) {
+ cause = cause.getCause();
+ /**
+ * @todo move this ConstraintViolationException
+ * check to CreateDatasetCommand. Can be triggered
+ * if you don't call dataset.setIdentifier() or if
+ * you feed it date format we don't like. Once this
+ * is done we should be able to drop EJBException
+ * from the catch above and only catch
+ * CommandException
+ *
+ * See also Have commands catch
+ * ConstraintViolationException and turn them into
+ * something that inherits from CommandException ·
+ * Issue #1009 · IQSS/dataverse -
+ * https://github.com/IQSS/dataverse/issues/1009
+ */
+ if (cause instanceof ConstraintViolationException) {
+ ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
+ for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
+ sb.append(" Invalid value: '").append(violation.getInvalidValue()).append("' for ")
+ .append(violation.getPropertyPath()).append(" at ")
+ .append(violation.getLeafBean()).append(" - ")
+ .append(violation.getMessage());
}
}
- logger.info(sb.toString());
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't create dataset: " + sb.toString());
- }
- if (createdDataset != null) {
- ReceiptGenerator receiptGenerator = new ReceiptGenerator();
- String baseUrl = urlManager.getHostnamePlusBaseUrlPath(collectionUri);
- DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, createdDataset);
- return depositReceipt;
- } else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Problem creating dataset. Null returned.");
}
- } else if (deposit.isBinaryOnly()) {
- // get here with this:
- // curl --insecure -s --data-binary "@example.zip" -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Binary deposit to the collection IRI via POST is not supported. Please POST an Atom entry instead.");
- } else if (deposit.isMultipart()) {
- // get here with this:
- // wget https://raw.github.com/swordapp/Simple-Sword-Server/master/tests/resources/multipart.dat
- // curl --insecure --data-binary "@multipart.dat" -H 'Content-Type: multipart/related; boundary="===============0670350989=="' -H "MIME-Version: 1.0" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/hdl:1902.1/12345
- // but...
- // "Yeah, multipart is critically broken across all implementations" -- http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00327.html
- throw new UnsupportedOperationException("Not yet implemented");
+ logger.info(sb.toString());
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't create dataset: " + sb.toString());
+ }
+ if (createdDataset != null) {
+ ReceiptGenerator receiptGenerator = new ReceiptGenerator();
+ String baseUrl = urlManager.getHostnamePlusBaseUrlPath(collectionUri);
+ DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, createdDataset);
+ return depositReceipt;
} else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "expected deposit types are isEntryOnly, isBinaryOnly, and isMultiPart");
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Problem creating dataset. Null returned.");
}
+ } else if (deposit.isBinaryOnly()) {
+ // get here with this:
+ // curl --insecure -s --data-binary "@example.zip" -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Binary deposit to the collection IRI via POST is not supported. Please POST an Atom entry instead.");
+ } else if (deposit.isMultipart()) {
+ // get here with this:
+ // wget https://raw.github.com/swordapp/Simple-Sword-Server/master/tests/resources/multipart.dat
+ // curl --insecure --data-binary "@multipart.dat" -H 'Content-Type: multipart/related; boundary="===============0670350989=="' -H "MIME-Version: 1.0" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/hdl:1902.1/12345
+ // but...
+ // "Yeah, multipart is critically broken across all implementations" -- http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00327.html
+ throw new UnsupportedOperationException("Not yet implemented");
} else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to create a dataset in this dataverse.");
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "expected deposit types are isEntryOnly, isBinaryOnly, and isMultiPart");
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias);
@@ -188,7 +190,7 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + collectionUri);
}
}
-
+
public void setRequest(HttpServletRequest request) {
this.request = request;
}
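A pattern worth noting in this and the following SWORD changes: the command object is constructed once, passed to permissionService.isUserAllowedOn(...) for the authorization check, and that same instance is later submitted to the engine, so the permission that is checked is exactly the permission of the command that runs. Condensed from the code above:

    // Condensed illustration of the check-then-submit pattern used in these SWORD classes.
    CreateDatasetCommand createDatasetCommand = new CreateDatasetCommand(dataset, dvReq, false);
    if (!permissionService.isUserAllowedOn(user, createDatasetCommand, dataset)) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle()
                + " is not authorized to create a dataset in this dataverse.");
    }
    Dataset createdDataset = engineSvc.submit(createDatasetCommand); // same instance that was permission-checked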
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
index f94ca2b07c7..91960a6e3d1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
@@ -4,9 +4,11 @@
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import java.util.ArrayList;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import java.util.List;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -32,6 +34,8 @@ public class CollectionListManagerImpl implements CollectionListManager {
DataverseServiceBean dataverseService;
@EJB
DatasetServiceBean datasetService;
+ @EJB
+ PermissionServiceBean permissionService;
@Inject
SwordAuth swordAuth;
@Inject
@@ -50,47 +54,42 @@ public Feed listCollectionContents(IRI iri, AuthCredentials authCredentials, Swo
Dataverse dv = dataverseService.findByAlias(dvAlias);
if (dv != null) {
- if (swordAuth.hasAccessToModifyDataverse(dvReq, dv)) {
- Abdera abdera = new Abdera();
- Feed feed = abdera.newFeed();
- feed.setTitle(dv.getName());
+ /**
+ * We'll say having AddDataset is enough to use this API
+ * endpoint, which means you are a Contributor to that
+ * dataverse. If we let just anyone call this endpoint, they
+ * will be able to see if the supplied dataverse is published or
+ * not.
+ */
+ if (!permissionService.requestOn(dvReq, dv).has(Permission.AddDataset)) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to list datasets in dataverse " + dv.getAlias());
+ }
+ Abdera abdera = new Abdera();
+ Feed feed = abdera.newFeed();
+ feed.setTitle(dv.getName());
+ String baseUrl = urlManager.getHostnamePlusBaseUrlPath(iri.toString());
+ List<Dataset> datasets = datasetService.findByOwnerId(dv.getId());
+ for (Dataset dataset : datasets) {
/**
- * @todo For the supplied dataverse, should we should only
- * the datasets that are *owned* by the user? Probably not!
- * We should be using the permission system? Show the
- * equivalent of datasets the user is "admin" on? What
- * permission should we check?
- *
- * And should we only show datasets at the current level or
- * should we show datasets that are in sub-dataverses as
- * well?
+ * @todo Will this be performant enough with production
+ * data, say in the root dataverse? Remove this todo if
+ * there are no complaints. :)
*/
- List childDvObjects = dataverseService.findByOwnerId(dv.getId());
- childDvObjects.addAll(datasetService.findByOwnerId(dv.getId()));
- List datasets = new ArrayList<>();
- for (Object object : childDvObjects) {
- if (object instanceof Dataset) {
- datasets.add((Dataset) object);
- }
+ if (!permissionService.isUserAllowedOn(user, new UpdateDatasetCommand(dataset, dvReq), dataset)) {
+ continue;
}
- String baseUrl = urlManager.getHostnamePlusBaseUrlPath(iri.toString());
- for (Dataset dataset : datasets) {
- String editUri = baseUrl + "/edit/study/" + dataset.getGlobalId();
- String editMediaUri = baseUrl + "/edit-media/study/" + dataset.getGlobalId();
- Entry entry = feed.addEntry();
- entry.setId(editUri);
- entry.setTitle(datasetService.getTitleFromLatestVersion(dataset.getId()));
- entry.setBaseUri(new IRI(editUri));
- entry.addLink(editMediaUri, "edit-media");
- feed.addEntry(entry);
- }
- Boolean dvHasBeenReleased = dv.isReleased();
- feed.addSimpleExtension(new QName(UriRegistry.SWORD_STATE, "dataverseHasBeenReleased"), dvHasBeenReleased.toString());
- return feed;
- } else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to list datasets in dataverse " + dv.getAlias());
+ String editUri = baseUrl + "/edit/study/" + dataset.getGlobalId();
+ String editMediaUri = baseUrl + "/edit-media/study/" + dataset.getGlobalId();
+ Entry entry = feed.addEntry();
+ entry.setId(editUri);
+ entry.setTitle(datasetService.getTitleFromLatestVersion(dataset.getId()));
+ entry.setBaseUri(new IRI(editUri));
+ entry.addLink(editMediaUri, "edit-media");
+ feed.addEntry(entry);
}
-
+ Boolean dvHasBeenReleased = dv.isReleased();
+ feed.addSimpleExtension(new QName(UriRegistry.SWORD_STATE, "dataverseHasBeenReleased"), dvHasBeenReleased.toString());
+ return feed;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
index 878635083b9..b4f5a5c0ec3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
@@ -7,8 +7,8 @@
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand;
@@ -18,6 +18,7 @@
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.api.imports.ImportGenericServiceBean;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import java.util.ArrayList;
import java.util.List;
@@ -57,6 +58,8 @@ public class ContainerManagerImpl implements ContainerManager {
EntityManager em;
@EJB
ImportGenericServiceBean importGenericService;
+ @EJB
+ PermissionServiceBean permissionService;
@Inject
SwordAuth swordAuth;
@Inject
@@ -65,7 +68,7 @@ public class ContainerManagerImpl implements ContainerManager {
@EJB
SwordServiceBean swordService;
private HttpServletRequest httpRequest;
-
+
@Override
public DepositReceipt getEntry(String uri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
AuthenticatedUser user = swordAuth.auth(authCredentials);
@@ -79,18 +82,17 @@ public DepositReceipt getEntry(String uri, Map map, AuthCredenti
String globalId = urlManager.getTargetIdentifier();
Dataset dataset = datasetService.findByGlobalId(globalId);
if (dataset != null) {
+ if (!permissionService.isUserAllowedOn(user, new GetDraftDatasetVersionCommand(dvReq, dataset), dataset)) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to retrieve entry for " + dataset.getGlobalId());
+ }
Dataverse dvThatOwnsDataset = dataset.getOwner();
- if (swordAuth.hasAccessToModifyDataverse(dvReq, dvThatOwnsDataset)) {
- ReceiptGenerator receiptGenerator = new ReceiptGenerator();
- String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
- DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
- if (depositReceipt != null) {
- return depositReceipt;
- } else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not generate deposit receipt.");
- }
+ ReceiptGenerator receiptGenerator = new ReceiptGenerator();
+ String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
+ DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
+ if (depositReceipt != null) {
+ return depositReceipt;
} else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to retrieve entry for " + dataset.getGlobalId());
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not generate deposit receipt.");
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset based on URL: " + uri);
@@ -129,33 +131,33 @@ public DepositReceipt replaceMetadata(String uri, Deposit deposit, AuthCredentia
if (dataset != null) {
SwordUtil.datasetLockCheck(dataset);
Dataverse dvThatOwnsDataset = dataset.getOwner();
- if (swordAuth.hasAccessToModifyDataverse(dvReq, dvThatOwnsDataset)) {
- DatasetVersion datasetVersion = dataset.getEditVersion();
- // erase all metadata before creating populating dataset version
- List<DatasetField> emptyDatasetFields = new ArrayList<>();
- datasetVersion.setDatasetFields(emptyDatasetFields);
- String foreignFormat = SwordUtil.DCTERMS;
- try {
- importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, datasetVersion);
- } catch (Exception ex) {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
- }
- swordService.addDatasetContact(datasetVersion, user);
- swordService.addDatasetDepositor(datasetVersion, user);
- swordService.addDatasetSubjectIfMissing(datasetVersion);
- swordService.setDatasetLicenseAndTermsOfUse(datasetVersion, deposit.getSwordEntry());
- try {
- engineSvc.submit(new UpdateDatasetCommand(dataset, dvReq));
- } catch (CommandException ex) {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem updating dataset: " + ex);
- }
- ReceiptGenerator receiptGenerator = new ReceiptGenerator();
- String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
- DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
- return depositReceipt;
- } else {
+ UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq);
+ if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias());
}
+ DatasetVersion datasetVersion = dataset.getEditVersion();
+ // erase all metadata before creating populating dataset version
+ List<DatasetField> emptyDatasetFields = new ArrayList<>();
+ datasetVersion.setDatasetFields(emptyDatasetFields);
+ String foreignFormat = SwordUtil.DCTERMS;
+ try {
+ importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, datasetVersion);
+ } catch (Exception ex) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
+ }
+ swordService.addDatasetContact(datasetVersion, user);
+ swordService.addDatasetDepositor(datasetVersion, user);
+ swordService.addDatasetSubjectIfMissing(datasetVersion);
+ swordService.setDatasetLicenseAndTermsOfUse(datasetVersion, deposit.getSwordEntry());
+ try {
+ engineSvc.submit(updateDatasetCommand);
+ } catch (CommandException ex) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem updating dataset: " + ex);
+ }
+ ReceiptGenerator receiptGenerator = new ReceiptGenerator();
+ String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
+ DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
+ return depositReceipt;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset based on global id (" + globalId + ") in URL: " + uri);
}
@@ -209,17 +211,24 @@ public void deleteContainer(String uri, AuthCredentials authCredentials, SwordCo
if (globalId != null) {
Dataset dataset = dataset = datasetService.findByGlobalId(globalId);
if (dataset != null) {
- SwordUtil.datasetLockCheck(dataset);
Dataverse dvThatOwnsDataset = dataset.getOwner();
- if (!swordAuth.hasAccessToModifyDataverse(dvRequest, dvThatOwnsDataset)) {
+ /**
+ * We are checking if DeleteDatasetVersionCommand can be
+ * called even though DeleteDatasetCommand can be called
+ * when a dataset hasn't been published. They should be
+ * equivalent in terms of a permission check.
+ */
+ DeleteDatasetVersionCommand deleteDatasetVersionCommand = new DeleteDatasetVersionCommand(dvRequest, dataset);
+ if (!permissionService.isUserAllowedOn(user, deleteDatasetVersionCommand, dataset)) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify " + dvThatOwnsDataset.getAlias());
}
+ SwordUtil.datasetLockCheck(dataset);
DatasetVersion.VersionState datasetVersionState = dataset.getLatestVersion().getVersionState();
if (dataset.isReleased()) {
if (datasetVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
logger.info("destroying working copy version of dataset " + dataset.getGlobalId());
try {
- engineSvc.submit(new DeleteDatasetVersionCommand(dvRequest, dataset));
+ engineSvc.submit(deleteDatasetVersionCommand);
} catch (CommandException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset version for " + dataset.getGlobalId() + ": " + ex);
}
@@ -233,6 +242,11 @@ public void deleteContainer(String uri, AuthCredentials authCredentials, SwordCo
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for dataset " + dataset.getGlobalId() + " in state " + datasetVersionState);
}
+ /**
+ * @todo Reformat else below properly so you can
+ * just reformat the whole file in Netbeans or
+ * similar.
+ */
} else {
// dataset has never been published, this is just a sanity check (should always be draft)
if (datasetVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
@@ -285,54 +299,52 @@ public DepositReceipt useHeaders(String uri, Deposit deposit, AuthCredentials au
}
if (dataset != null) {
Dataverse dvThatOwnsDataset = dataset.getOwner();
- if (swordAuth.hasAccessToModifyDataverse(dvRequest, dvThatOwnsDataset)) {
- if (!deposit.isInProgress()) {
- /**
- * We are considering a draft version of a study
- * to be incomplete and are saying that sending
- * isInProgress=false means the study version is
- * complete and can be released.
- *
- * 9.2. Deposit Incomplete
- *
- * "If In-Progress is true, the server SHOULD
- * expect the client to provide further updates
- * to the item some undetermined time in the
- * future. Details of how this is implemented is
- * dependent on the server's purpose. For
- * example, a repository system may hold items
- * which are marked In-Progress in a workspace
- * until such time as a client request indicates
- * that the deposit is complete." --
- * http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#continueddeposit_incomplete
- */
- if (!dataset.getLatestVersion().getVersionState().equals(DatasetVersion.VersionState.RELEASED)) {
- Command<Dataset> cmd;
- try {
- boolean doMinorVersionBump = false;
- // if dataset is unreleased, major version; if released, then check if can be minor
- if (dataset.isReleased() && dataset.getLatestVersion().isMinorUpdate()) {
- doMinorVersionBump = true;
- }
- cmd = new PublishDatasetCommand(dataset, dvRequest, doMinorVersionBump);
- dataset = engineSvc.submit(cmd);
- } catch (CommandException ex) {
- String msg = "Unable to publish dataset: " + ex;
- logger.severe(msg + ": " + ex.getMessage());
- throw SwordUtil.throwRegularSwordErrorWithoutStackTrace(msg);
- }
- ReceiptGenerator receiptGenerator = new ReceiptGenerator();
- String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
- DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
- return depositReceipt;
- } else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of dataset " + globalId + " has already been published.");
+ boolean doMinorVersionBump = false;
+ // if dataset is unreleased, major version; if released, then check if can be minor
+ if (dataset.isReleased() && dataset.getLatestVersion().isMinorUpdate()) {
+ doMinorVersionBump = true;
+ }
+ PublishDatasetCommand publishDatasetCommand = new PublishDatasetCommand(dataset, dvRequest, doMinorVersionBump);
+ if (!permissionService.isUserAllowedOn(user, publishDatasetCommand, dataset)) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias());
+ }
+ if (!deposit.isInProgress()) {
+ /**
+ * We are considering a draft version of a study to
+ * be incomplete and are saying that sending
+ * isInProgress=false means the study version is
+ * complete and can be released.
+ *
+ * 9.2. Deposit Incomplete
+ *
+ * "If In-Progress is true, the server SHOULD expect
+ * the client to provide further updates to the item
+ * some undetermined time in the future. Details of
+ * how this is implemented is dependent on the
+ * server's purpose. For example, a repository
+ * system may hold items which are marked
+ * In-Progress in a workspace until such time as a
+ * client request indicates that the deposit is
+ * complete." --
+ * http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#continueddeposit_incomplete
+ */
+ if (!dataset.getLatestVersion().getVersionState().equals(DatasetVersion.VersionState.RELEASED)) {
+ try {
+ dataset = engineSvc.submit(publishDatasetCommand);
+ } catch (CommandException ex) {
+ String msg = "Unable to publish dataset: " + ex;
+ logger.severe(msg + ": " + ex.getMessage());
+ throw SwordUtil.throwRegularSwordErrorWithoutStackTrace(msg);
}
+ ReceiptGenerator receiptGenerator = new ReceiptGenerator();
+ String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
+ DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
+ return depositReceipt;
} else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Pass 'In-Progress: false' header to publish a dataset.");
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of dataset " + globalId + " has already been published.");
}
} else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias());
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Pass 'In-Progress: false' header to publish a dataset.");
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset using globalId " + globalId);
@@ -345,15 +357,15 @@ public DepositReceipt useHeaders(String uri, Deposit deposit, AuthCredentials au
if (dvAlias != null) {
Dataverse dvToRelease = dataverseService.findByAlias(dvAlias);
if (dvToRelease != null) {
- if (!swordAuth.hasAccessToModifyDataverse(dvRequest, dvToRelease)) {
+ PublishDataverseCommand publishDataverseCommand = new PublishDataverseCommand(dvRequest, dvToRelease);
+ if (!permissionService.isUserAllowedOn(user, publishDataverseCommand, dvToRelease)) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvAlias);
}
if (deposit.isInProgress()) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unpublishing a dataverse is not supported.");
}
- PublishDataverseCommand cmd = new PublishDataverseCommand(dvRequest, dvToRelease);
try {
- engineSvc.submit(cmd);
+ engineSvc.submit(publishDataverseCommand);
ReceiptGenerator receiptGenerator = new ReceiptGenerator();
String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
DepositReceipt depositReceipt = receiptGenerator.createDataverseReceipt(baseUrl, dvToRelease);
@@ -390,6 +402,5 @@ public boolean isStatementRequest(String uri, Map map, AuthCrede
public void setHttpRequest(HttpServletRequest httpRequest) {
this.httpRequest = httpRequest;
}
-
-
+
}
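Note on the hunks above: the old all-or-nothing swordAuth.hasAccessToModifyDataverse() check is replaced by constructing the exact command to be issued and asking PermissionServiceBean whether the user may run it. A minimal sketch of the pattern, using only names that appear in this patch (it is not runnable outside the EJB container):

    // Build the command first, ask the permission service about that same command,
    // then submit the identical instance if the user is allowed to issue it.
    boolean doMinorVersionBump = dataset.isReleased() && dataset.getLatestVersion().isMinorUpdate();
    PublishDatasetCommand publishCmd = new PublishDatasetCommand(dataset, dvRequest, doMinorVersionBump);
    if (!permissionService.isUserAllowedOn(user, publishCmd, dataset)) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User is not authorized to publish this dataset.");
    }
    dataset = engineSvc.submit(publishCmd);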
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
index f0513ab4e11..6537ec488da 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
@@ -7,8 +7,8 @@
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
@@ -48,11 +48,13 @@ public class MediaResourceManagerImpl implements MediaResourceManager {
DataFileServiceBean dataFileService;
@EJB
IngestServiceBean ingestService;
+ @EJB
+ PermissionServiceBean permissionService;
@Inject
SwordAuth swordAuth;
@Inject
UrlManager urlManager;
-
+
private HttpServletRequest httpRequest;
@Override
@@ -77,7 +79,12 @@ public MediaResource getMediaResourceRepresentation(String uri, Map cmd;
- cmd = new UpdateDatasetCommand(dataset, dvReq);
try {
- dataset = commandEngine.submit(cmd);
+ dataset = commandEngine.submit(updateDatasetCommand);
} catch (CommandException ex) {
throw returnEarly("Couldn't update dataset " + ex);
} catch (EJBException ex) {
@@ -341,6 +343,5 @@ private SwordError returnEarly(String error) {
public void setHttpRequest(HttpServletRequest httpRequest) {
this.httpRequest = httpRequest;
}
-
-
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
index 5fb94ce17f3..7c80fe810d2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
@@ -60,6 +60,12 @@ public ServiceDocument getServiceDocument(String sdUri, AuthCredentials authCred
return serviceDocument;
}
+ /**
+ * We don't expect this to support Shibboleth groups because, even though
+ * a Shibboleth user can have an API token, the transient
+ * shibIdentityProvider String on AuthenticatedUser is only set when a
+ * SAML assertion is made at runtime via the browser.
+ */
List dataverses = permissionService.getDataversesUserHasPermissionOn(user, Permission.AddDataset);
for (Dataverse dataverse : dataverses) {
String dvAlias = dataverse.getAlias();
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
index 18ea18663eb..5089204f854 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
@@ -6,8 +6,10 @@
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -36,17 +38,20 @@ public class StatementManagerImpl implements StatementManager {
@EJB
DatasetServiceBean datasetService;
+ @EJB
+ PermissionServiceBean permissionService;
@Inject
SwordAuth swordAuth;
@Inject
UrlManager urlManager;
-
+
private HttpServletRequest httpRequest;
-
+
@Override
public Statement getStatement(String editUri, Map map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
AuthenticatedUser user = swordAuth.auth(authCredentials);
+ DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
urlManager.processUrl(editUri);
String globalId = urlManager.getTargetIdentifier();
if (urlManager.getTargetType().equals("study") && globalId != null) {
@@ -58,69 +63,71 @@ public Statement getStatement(String editUri, Map map, AuthCrede
}
Dataverse dvThatOwnsDataset = dataset.getOwner();
- if (swordAuth.hasAccessToModifyDataverse( new DataverseRequest(user, httpRequest), dvThatOwnsDataset)) {
- String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + dataset.getGlobalId();
- String author = dataset.getLatestVersion().getAuthorsStr();
- String title = dataset.getLatestVersion().getTitle();
- // in the statement, the element is called "updated"
- Date lastUpdatedFinal = new Date();
- Date lastUpdateTime = dataset.getLatestVersion().getLastUpdateTime();
- if (lastUpdateTime != null) {
- lastUpdatedFinal = lastUpdateTime;
+ if (!permissionService.isUserAllowedOn(user, new GetDraftDatasetVersionCommand(dvReq, dataset), dataset)) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to view dataset with global ID " + globalId);
+ }
+ String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + dataset.getGlobalId();
+ String author = dataset.getLatestVersion().getAuthorsStr();
+ String title = dataset.getLatestVersion().getTitle();
+ // in the statement, the element is called "updated"
+ Date lastUpdatedFinal = new Date();
+ Date lastUpdateTime = dataset.getLatestVersion().getLastUpdateTime();
+ if (lastUpdateTime != null) {
+ lastUpdatedFinal = lastUpdateTime;
+ } else {
+ logger.info("lastUpdateTime was null, trying createtime");
+ Date createtime = dataset.getLatestVersion().getCreateTime();
+ if (createtime != null) {
+ lastUpdatedFinal = createtime;
} else {
- logger.info("lastUpdateTime was null, trying createtime");
- Date createtime = dataset.getLatestVersion().getCreateTime();
- if (createtime != null) {
- lastUpdatedFinal = createtime;
- } else {
- logger.info("creatime was null, using \"now\"");
- lastUpdatedFinal = new Date();
- }
+ logger.info("creatime was null, using \"now\"");
+ lastUpdatedFinal = new Date();
}
- AtomDate atomDate = new AtomDate(lastUpdatedFinal);
- String datedUpdated = atomDate.toString();
- Statement statement = new AtomStatement(feedUri, author, title, datedUpdated);
- Map states = new HashMap<>();
- states.put("latestVersionState", dataset.getLatestVersion().getVersionState().toString());
- Boolean isMinorUpdate = dataset.getLatestVersion().isMinorUpdate();
- states.put("isMinorUpdate", isMinorUpdate.toString());
- DatasetLock lock = dataset.getDatasetLock();
- if (lock != null) {
- states.put("locked", "true");
- states.put("lockedDetail", lock.getInfo());
- states.put("lockedStartTime", lock.getStartTime().toString());
- } else {
- states.put("locked", "false");
+ }
+ AtomDate atomDate = new AtomDate(lastUpdatedFinal);
+ String datedUpdated = atomDate.toString();
+ Statement statement = new AtomStatement(feedUri, author, title, datedUpdated);
+ Map<String, String> states = new HashMap<>();
+ states.put("latestVersionState", dataset.getLatestVersion().getVersionState().toString());
+ Boolean isMinorUpdate = dataset.getLatestVersion().isMinorUpdate();
+ states.put("isMinorUpdate", isMinorUpdate.toString());
+ DatasetLock lock = dataset.getDatasetLock();
+ if (lock != null) {
+ states.put("locked", "true");
+ states.put("lockedDetail", lock.getInfo());
+ states.put("lockedStartTime", lock.getStartTime().toString());
+ } else {
+ states.put("locked", "false");
+ }
+ statement.setStates(states);
+ List<FileMetadata> fileMetadatas = dataset.getLatestVersion().getFileMetadatas();
+ for (FileMetadata fileMetadata : fileMetadatas) {
+ DataFile dataFile = fileMetadata.getDataFile();
+ // We are exposing the filename for informational purposes. The file id is what you
+ // actually operate on to delete a file, etc.
+ //
+ // Replace spaces to avoid IRISyntaxException
+ String fileNameFinal = fileMetadata.getLabel().replace(' ', '_');
+ String fileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + dataFile.getId() + "/" + fileNameFinal;
+ IRI fileUrl;
+ try {
+ fileUrl = new IRI(fileUrlString);
+ } catch (IRISyntaxException ex) {
+ throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL for file ( " + fileUrlString + " ) resulted in " + ex.getMessage());
}
- statement.setStates(states);
- List fileMetadatas = dataset.getLatestVersion().getFileMetadatas();
- for (FileMetadata fileMetadata : fileMetadatas) {
- DataFile dataFile = fileMetadata.getDataFile();
- // We are exposing the filename for informational purposes. The file id is what you
- // actually operate on to delete a file, etc.
- //
- // Replace spaces to avoid IRISyntaxException
- String fileNameFinal = fileMetadata.getLabel().replace(' ', '_');
- String fileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + dataFile.getId() + "/" + fileNameFinal;
- IRI fileUrl;
- try {
- fileUrl = new IRI(fileUrlString);
- } catch (IRISyntaxException ex) {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL for file ( " + fileUrlString + " ) resulted in " + ex.getMessage());
- }
- ResourcePart resourcePart = new ResourcePart(fileUrl.toString());
- // default to something that doesn't throw a org.apache.abdera.util.MimeTypeParseException
- String finalFileFormat = "application/octet-stream";
- String contentType = dataFile.getContentType();
- if (contentType != null) {
- finalFileFormat = contentType;
- }
- resourcePart.setMediaType(finalFileFormat);
- /**
- * @todo: Why are properties set on a ResourcePart not
- * exposed when you GET a Statement? Asked about this at
- * http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00394.html
- */
+ ResourcePart resourcePart = new ResourcePart(fileUrl.toString());
+ // default to something that doesn't throw a org.apache.abdera.util.MimeTypeParseException
+ String finalFileFormat = "application/octet-stream";
+ String contentType = dataFile.getContentType();
+ if (contentType != null) {
+ finalFileFormat = contentType;
+ }
+ resourcePart.setMediaType(finalFileFormat);
+ /**
+ * @todo: Why are properties set on a ResourcePart not exposed
+ * when you GET a Statement? Asked about this at
+ * http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00394.html
+ */
// Map properties = new HashMap();
// properties.put("filename", studyFile.getFileName());
// properties.put("category", studyFile.getLatestCategory());
@@ -128,19 +135,16 @@ public Statement getStatement(String editUri, Map map, AuthCrede
// properties.put("id", studyFile.getId().toString());
// properties.put("UNF", studyFile.getUnf());
// resourcePart.setProperties(properties);
- statement.addResource(resourcePart);
- /**
- * @todo it's been noted at
- * https://github.com/IQSS/dataverse/issues/892#issuecomment-54159284
- * that at the file level the "updated" date is always
- * "now", which seems to be set here:
- * https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/AtomStatement.java#L70
- */
- }
- return statement;
- } else {
- throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to view dataset with global ID " + globalId);
+ statement.addResource(resourcePart);
+ /**
+ * @todo it's been noted at
+ * https://github.com/IQSS/dataverse/issues/892#issuecomment-54159284
+ * that at the file level the "updated" date is always "now",
+ * which seems to be set here:
+ * https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/AtomStatement.java#L70
+ */
}
+ return statement;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + editUri);
}
@@ -149,5 +153,5 @@ public Statement getStatement(String editUri, Map map, AuthCrede
public void setHttpRequest(HttpServletRequest httpRequest) {
this.httpRequest = httpRequest;
}
-
+
}
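The space replacement in the file loop above exists because Abdera's IRI constructor rejects unencoded spaces with an IRISyntaxException, so the file label is sanitized before the edit-media URL is assembled. A small illustration (the hostname, path, and file id below are made-up values, not taken from this patch):

    import org.apache.abdera.i18n.iri.IRI;

    // A label such as "my data file.tab" would make new IRI(...) throw
    // IRISyntaxException, so spaces are turned into underscores first.
    String label = "my data file.tab";
    String fileNameFinal = label.replace(' ', '_');
    IRI fileUrl = new IRI("https://demo.example.edu/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/42/" + fileNameFinal);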
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java
index 34a7ccd3df4..9fe6033288f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java
@@ -1,33 +1,22 @@
package edu.harvard.iq.dataverse.api.datadeposit;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
-import edu.harvard.iq.dataverse.PermissionServiceBean;
-import edu.harvard.iq.dataverse.RoleAssignment;
-import edu.harvard.iq.dataverse.UserServiceBean;
import edu.harvard.iq.dataverse.api.AbstractApiBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import java.util.logging.Level;
import java.util.logging.Logger;
-import javax.ejb.EJB;
import org.swordapp.server.AuthCredentials;
import org.swordapp.server.SwordAuthException;
-import org.swordapp.server.SwordError;
import org.swordapp.server.SwordServerException;
+/**
+ * In early versions of Dataverse 4 this class was responsible for both
+ * instantiating an AuthenticatedUser and enforcing permissions, but
+ * permission enforcement is now done inside each of the methods in the
+ * "*Impl.java" files for SWORD.
+ */
public class SwordAuth extends AbstractApiBean {
private static final Logger logger = Logger.getLogger(SwordAuth.class.getCanonicalName());
- @EJB
- PermissionServiceBean permissionService;
- @EJB
- DataverseRoleServiceBean roleService;
- @EJB
- UserServiceBean userService;
-
public AuthenticatedUser auth(AuthCredentials authCredentials) throws SwordAuthException, SwordServerException {
if (authCredentials == null) {
@@ -57,67 +46,4 @@ public AuthenticatedUser auth(AuthCredentials authCredentials) throws SwordAuthE
}
}
- /**
- * @todo Review every place this method is called and think about how we can
- * use more granular permissions rather than the old equivalent of "admin"
- * in DVN 3.x.
- */
- boolean hasAccessToModifyDataverse(DataverseRequest dataverseRequest, Dataverse dataverse) throws SwordError {
- boolean authorized = false;
-
- /**
- * @todo use actual roles
- */
-// VDCRole role = vdcUser.getVDCRole(dv);
-// String roleString = null;
-// if (role != null) {
-// roleString = role.getRole().getName();
-// if ("admin".equals(roleString)) {
-// authorized = true;
-// } else if ("contributor".equals(roleString) || "curator".equals(roleString) || "privileged viewer".equals(roleString)) {
-// authorized = false;
-// return early to avoid throwing exception when getting Service Document
-// return authorized;
-// } else {
-// authorized = false;
-// }
-// }
-//
- for (RoleAssignment roleAssignment : roleService.assignmentsFor(dataverseRequest.getUser(), dataverse).getAssignments()) {
- /**
- * @todo do we want to hard code a check for the string "manager"
- * here? Probably not... for now let's just check for
- * Permission.DestructiveEdit which feels equivalent to the "admin"
- * role in DVN 3.x. We could also do a check for an admin-type
- * command like this: permissionService.userOn(dataverseUser,
- * dataverse).canIssue(DestroyDataverseCommand.class)
- *
- * @todo What about the root dataverse? With the GUI, any user can
- * create datasets in the root dataverse but users won't be "admin"
- * of the root dataverse. The "all or nothing" admin concept for all
- * SWORD operations will probably need to go away. Rather than a
- * single hasAccessToModifyDataverse method, we should have methods
- * per SWORD commands that map onto permissions like
- * canIssue(CreateDatasetCommand.class)
- */
- logger.log(Level.FINE, "{0}: {1} has role {2}", new Object[]{dataverse.getAlias(), dataverseRequest.getUser().getIdentifier(), roleAssignment.getRole().getAlias()});
- }
- if (permissionService.requestOn(dataverseRequest, dataverse).has(Permission.EditDataverse)) {
- authorized = true;
- return authorized;
- } else {
- authorized = false;
- return authorized;
- }
-
- /**
- * @todo: for backwards compatibility with DVN 3.x do we need to throw
- * this SWORD error?
- */
-// if (!authorized) {
-// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + dataverseUser.getUserName() + " with role of " + roleString + " is not authorized to modify dataverse " + dataverse.getAlias());
-// } else {
-// return authorized;
-// }
- }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java
index 25593694def..5c53e387d4d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java
@@ -181,6 +181,15 @@ String getHostnamePlusBaseUrlPath(String url) throws SwordError {
* users are operating on the URLs returned (as they should) returning
* the current version will avoid deprecation warnings on the Dataverse
* side.
+ *
+ * @todo Prevent "https://localhost:8080" from being returned. It should
+ * either be "http://localhost:8080" or "https://localhost:8181". Use
+ * SystemConfig.getDataverseSiteUrl instead of SystemConfig.FQDN above.
+ * Not hard coding https here is worse for security, but if users
+ * have configured dataverse.siteUrl to be http rather than https we
+ * assume they are doing so on purpose (despite our warnings in the
+ * Installation Guide), perhaps because they are only kicking the tires
+ * on Dataverse.
*/
return "https://" + hostName + optionalPort + swordConfiguration.getBaseUrlPathCurrent();
}
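A hypothetical sketch of the @todo above, deriving the scheme and host from the configured site URL instead of hard coding "https://" (the injected systemConfig field is an assumption; only SystemConfig.getDataverseSiteUrl and getBaseUrlPathCurrent are named in the comment and patch):

    // Hypothetical: honor dataverse.siteUrl (scheme included) rather than forcing https.
    String siteUrl = systemConfig.getDataverseSiteUrl(); // e.g. "http://localhost:8080" or "https://localhost:8181"
    return siteUrl + swordConfiguration.getBaseUrlPathCurrent();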
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DataFileDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataFileDTO.java
index 10dc506ae2c..45a231cbfb9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/dto/DataFileDTO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataFileDTO.java
@@ -9,9 +9,8 @@
* @author ellenk
*/
public class DataFileDTO {
- private String fileName;
private String id;
- private String name;
+ private String storageIdentifier;
private String contentType;
private String filename;
private String originalFileFormat;
@@ -29,14 +28,14 @@ public void setDataTables(List dataTables) {
this.dataTables = dataTables;
}
- public String getFileName() {
- return fileName;
+ public String getStorageIdentifier() {
+ return storageIdentifier;
}
- public void setFileName(String fileName) {
- this.fileName = fileName;
+ public void setStorageIdentifier(String storageIdentifier) {
+ this.storageIdentifier = storageIdentifier;
}
-
+
public String getId() {
return id;
}
@@ -45,14 +44,6 @@ public void setId(String id) {
this.id = id;
}
- public String getName() {
- return name;
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
public String getContentType() {
return contentType;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java
index 65d1b1748b4..41b5ff4b700 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java
@@ -14,6 +14,8 @@ public class DatasetDTO implements java.io.Serializable {
private String authority;
private String globalIdCreateTime;
private String doiSeparator;
+ private String publisher;
+ private String publicationDate;
private DatasetVersionDTO datasetVersion;
private List dataFiles;
@@ -81,6 +83,22 @@ public String getDoiSeparator() {
public void setDoiSeparator(String doiSeparator) {
this.doiSeparator = doiSeparator;
}
+
+ public String getPublisher() {
+ return publisher;
+ }
+
+ public void setPublisher(String publisher) {
+ this.publisher = publisher;
+ }
+
+ public String getPublicationDate() {
+ return publicationDate;
+ }
+
+ public void setPublicationDate(String publicationDate) {
+ this.publicationDate = publicationDate;
+ }
@Override
public String toString() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java
index e4c4140e8a0..0fbfef37056 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetVersionDTO.java
@@ -17,6 +17,7 @@ public class DatasetVersionDTO {
long id;
VersionState versionState;
String releaseDate;
+ String releaseTime;
String lastUpdateTime;
String createTime;
String archiveTime;
@@ -37,6 +38,7 @@ public class DatasetVersionDTO {
String sizeOfCollection;
String studyCompletion;
String citation;
+ String license;
boolean inReview;
Map metadataBlocks;
@@ -260,6 +262,15 @@ public String getReleaseDate() {
public void setReleaseDate(String releaseDate) {
this.releaseDate = releaseDate;
}
+
+
+ public String getReleaseTime() {
+ return releaseTime;
+ }
+
+ public void setReleaseTime(String releaseTime) {
+ this.releaseTime = releaseTime;
+ }
public String getLastUpdateTime() {
return lastUpdateTime;
@@ -284,6 +295,14 @@ public String getArchiveTime() {
public void setArchiveTime(String archiveTime) {
this.archiveTime = archiveTime;
}
+
+ public String getLicense() {
+ return license;
+ }
+
+ public void setLicense(String license) {
+ this.license = license;
+ }
public Map getMetadataBlocks() {
return metadataBlocks;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/FileDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/FileDTO.java
index cb600b09842..6b375ffc837 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/dto/FileDTO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/FileDTO.java
@@ -2,14 +2,14 @@
public class FileDTO {
- DataFileDTO datafile;
+ DataFileDTO dataFile;
- public DataFileDTO getDatafile() {
- return datafile;
+ public DataFileDTO getDataFile() {
+ return dataFile;
}
- public void setDatafile(DataFileDTO datafile) {
- this.datafile = datafile;
+ public void setDataFile(DataFileDTO datafile) {
+ this.dataFile = datafile;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
index 55b939f1f7c..b96acd8fc7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
@@ -9,6 +9,7 @@
import edu.harvard.iq.dataverse.api.dto.FieldDTO;
import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType;
+import static edu.harvard.iq.dataverse.export.ddi.DdiExportUtil.NOTE_TYPE_CONTENTTYPE;
import edu.harvard.iq.dataverse.util.StringUtil;
import java.io.File;
import java.io.FileInputStream;
@@ -33,6 +34,11 @@
*
* @author ellenk
*/
+// TODO:
+// does this need to be a service bean/stateless? - could be transformed into
+// a util with static methods.
+// (it would need to be passed the fields service beans as arguments)
+// -- L.A. 4.5
@Stateless
public class ImportDDIServiceBean {
public static final String SOURCE_DVN_3_0 = "DVN_3_0";
@@ -86,22 +92,23 @@ public class ImportDDIServiceBean {
public static final String NOTE_TYPE_REPLICATION_FOR = "DVN:REPLICATION_FOR";
private XMLInputFactory xmlInputFactory = null;
- private ImportType importType;
-
+
@EJB CustomFieldServiceBean customFieldService;
@EJB DatasetFieldServiceBean datasetFieldService;
-
+
+ // TODO:
+ // stop passing the xml source as a string; (it could be huge!) -- L.A. 4.5
public DatasetDTO doImport(ImportType importType, String xmlToParse) throws XMLStreamException, ImportException {
- this.importType=importType;
xmlInputFactory = javax.xml.stream.XMLInputFactory.newInstance();
xmlInputFactory.setProperty("javax.xml.stream.isCoalescing", java.lang.Boolean.TRUE); DatasetDTO datasetDTO = this.initializeDataset();
// Read docDescr and studyDesc into DTO objects.
- Map fileMap = mapDDI(xmlToParse, datasetDTO);
- if (!importType.equals(ImportType.MIGRATION)) {
- // For migration, this filemetadata is copied in a separate SQL step
+ // TODO: the fileMap is likely not needed.
+ Map fileMap = mapDDI(importType, xmlToParse, datasetDTO);
+ if (!isMigrationImport(importType)) {
+ // For migration, this filemetadata is copied in a separate SQL step
}
return datasetDTO;
}
@@ -110,22 +117,36 @@ public void importFileMetadata(DatasetVersion dv, String xmlToParse) {
}
+ private boolean isHarvestImport(ImportType importType) {
+ return importType.equals(ImportType.HARVEST) || importType.equals(ImportType.HARVEST_WITH_FILES);
+ }
+
+ private boolean isHarvestWithFilesImport(ImportType importType) {
+ return importType.equals(ImportType.HARVEST_WITH_FILES);
+ }
+
+ private boolean isNewImport(ImportType importType) {
+ return importType.equals(ImportType.NEW);
+ }
+ private boolean isMigrationImport(ImportType importType) {
+ return importType.equals(ImportType.MIGRATION);
+ }
- public Map mapDDI(String xmlToParse, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
+ public Map mapDDI(ImportType importType, String xmlToParse, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
Map filesMap = new HashMap();
StringReader reader = new StringReader(xmlToParse);
XMLStreamReader xmlr = null;
XMLInputFactory xmlFactory = javax.xml.stream.XMLInputFactory.newInstance();
xmlr = xmlFactory.createXMLStreamReader(reader);
- processDDI(xmlr, datasetDTO, filesMap);
+ processDDI(importType, xmlr, datasetDTO, filesMap);
return filesMap;
}
- public Map mapDDI(File ddiFile, DatasetDTO datasetDTO ) throws ImportException {
+ public Map mapDDI(ImportType importType, File ddiFile, DatasetDTO datasetDTO ) throws ImportException {
FileInputStream in = null;
XMLStreamReader xmlr = null;
Map filesMap = new HashMap();
@@ -133,7 +154,7 @@ public Map mapDDI(File ddiFile, DatasetDTO datasetDTO ) throws ImportException
try {
in = new FileInputStream(ddiFile);
xmlr = xmlInputFactory.createXMLStreamReader(in);
- processDDI( xmlr, datasetDTO , filesMap );
+ processDDI(importType, xmlr, datasetDTO , filesMap );
} catch (FileNotFoundException ex) {
Logger.getLogger("global").log(Level.SEVERE, null, ex);
throw new EJBException("ERROR occurred in mapDDI: File Not Found!");
@@ -153,7 +174,7 @@ public Map mapDDI(File ddiFile, DatasetDTO datasetDTO ) throws ImportException
return filesMap;
}
- private void processDDI( XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesMap) throws XMLStreamException, ImportException {
+ private void processDDI(ImportType importType, XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesMap) throws XMLStreamException, ImportException {
// make sure we have a codeBook
//while ( xmlr.next() == XMLStreamConstants.COMMENT ); // skip pre root comments
@@ -175,7 +196,7 @@ private void processDDI( XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesM
// In fact, we should only use these IDs when no ID is available down
// in the study description section!
- processCodeBook(xmlr, datasetDTO, filesMap);
+ processCodeBook(importType, xmlr, datasetDTO, filesMap);
MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get("citation");
if (codeBookLevelId != null && !codeBookLevelId.equals("")) {
@@ -189,7 +210,7 @@ private void processDDI( XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesM
}
}
- if (importType.equals(ImportType.HARVEST)) {
+ if (isHarvestImport(importType)) {
datasetDTO.getDatasetVersion().setVersionState(VersionState.RELEASED);
}
@@ -213,24 +234,24 @@ public DatasetDTO initializeDataset() {
return datasetDTO;
}
- // Read the XMLStream, and populate datasetDTO and filesMap
- private void processCodeBook( XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesMap) throws XMLStreamException, ImportException {
+
+ // Read the XMLStream, and populate datasetDTO and filesMap
+ private void processCodeBook(ImportType importType, XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesMap) throws XMLStreamException, ImportException {
for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
if (event == XMLStreamConstants.START_ELEMENT) {
if (xmlr.getLocalName().equals("docDscr")) {
processDocDscr(xmlr, datasetDTO);
}
else if (xmlr.getLocalName().equals("stdyDscr")) {
- processStdyDscr(xmlr, datasetDTO);
+ processStdyDscr(importType, xmlr, datasetDTO);
}
- else if (xmlr.getLocalName().equals("fileDscr") && !importType.equals(ImportType.MIGRATION)) {
+ else if (xmlr.getLocalName().equals("fileDscr") && !isMigrationImport(importType)) {
// EMK TODO: add this back in for ImportType.NEW
- // processFileDscr(xmlr, datasetDTO, filesMap);
+ //processFileDscr(xmlr, datasetDTO, filesMap);
}
- else if (xmlr.getLocalName().equals("otherMat") && !importType.equals(ImportType.MIGRATION) ) {
- // EMK TODO: add this back in
- // processOtherMat(xmlr, studyVersion);
+ else if (xmlr.getLocalName().equals("otherMat") && (isNewImport(importType) || isHarvestWithFilesImport(importType)) ) {
+ processOtherMat(xmlr, datasetDTO, filesMap);
}
} else if (event == XMLStreamConstants.END_ELEMENT) {
@@ -309,11 +330,11 @@ private String getElementText(XMLStreamReader xmlr) throws XMLStreamException {
return content.toString();
}
- private void processStdyDscr(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
+ private void processStdyDscr(ImportType importType, XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
if (event == XMLStreamConstants.START_ELEMENT) {
- if (xmlr.getLocalName().equals("citation")) processCitation(xmlr, datasetDTO);
+ if (xmlr.getLocalName().equals("citation")) processCitation(importType, xmlr, datasetDTO);
else if (xmlr.getLocalName().equals("stdyInfo")) processStdyInfo(xmlr, datasetDTO.getDatasetVersion());
else if (xmlr.getLocalName().equals("method")) processMethod(xmlr, datasetDTO.getDatasetVersion());
@@ -408,7 +429,7 @@ else if (xmlr.getLocalName().equals("relStdy")) {
}
}
}
- private void processCitation(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
+ private void processCitation(ImportType importType, XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
DatasetVersionDTO dvDTO = datasetDTO.getDatasetVersion();
MetadataBlockDTO citation=datasetDTO.getDatasetVersion().getMetadataBlocks().get("citation");
for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
@@ -418,7 +439,7 @@ private void processCitation(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws
else if (xmlr.getLocalName().equals("prodStmt")) processProdStmt(xmlr,citation);
else if (xmlr.getLocalName().equals("distStmt")) processDistStmt(xmlr,citation);
else if (xmlr.getLocalName().equals("serStmt")) processSerStmt(xmlr,citation);
- else if (xmlr.getLocalName().equals("verStmt")) processVerStmt(xmlr,dvDTO);
+ else if (xmlr.getLocalName().equals("verStmt")) processVerStmt(importType, xmlr,dvDTO);
else if (xmlr.getLocalName().equals("notes")) {
String _note = parseNoteByType( xmlr, NOTE_TYPE_UNF );
if (_note != null) {
@@ -939,8 +960,8 @@ private void processTargetSampleSize(XMLStreamReader xmlr, MetadataBlockDTO soci
Note: we should use the verStmt with source="DVN" as the 'official' version statement
DDI's that we are migrating should have one and only one DVN version statement
*/
- private void processVerStmt(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) throws XMLStreamException {
- if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
+ private void processVerStmt(ImportType importType, XMLStreamReader xmlr, DatasetVersionDTO dvDTO) throws XMLStreamException {
+ if (isMigrationImport(importType) || isHarvestImport(importType)) {
if (!"DVN".equals(xmlr.getAttributeValue(null, "source"))) {
for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
if (event == XMLStreamConstants.START_ELEMENT) {
@@ -977,7 +998,7 @@ private void processVerStmt(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) throw
}
}
- if (importType.equals(ImportType.NEW)) {
+ if (isNewImport(importType)) {
// If this is a new, Draft version, versionNumber and minor versionNumber are null.
dvDTO.setVersionState(VersionState.DRAFT);
}
@@ -1523,7 +1544,70 @@ private void addToSet(HashSet set, String typeName, String value ) {
}
}
-
+ private void processOtherMat(XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesMap) throws XMLStreamException {
+ FileMetadataDTO fmdDTO = new FileMetadataDTO();
+
+ if (datasetDTO.getDatasetVersion().getFileMetadatas() == null) {
+ datasetDTO.getDatasetVersion().setFileMetadatas(new ArrayList<>());
+ }
+ datasetDTO.getDatasetVersion().getFileMetadatas().add(fmdDTO);
+
+ DataFileDTO dfDTO = new DataFileDTO();
+ //if (datasetDTO.getDataFiles() == null) {
+ // datasetDTO.setDataFiles(new ArrayList<>());
+ //}
+ //datasetDTO.getDataFiles().add(dfDTO);
+
+ dfDTO.setStorageIdentifier( xmlr.getAttributeValue(null, "URI"));
+ fmdDTO.setDataFile(dfDTO);
+
+
+ // TODO: handle categories; note that multiple categories are allowed in Dataverse 4;
+ String catName = null;
+ String icpsrDesc = null;
+ String icpsrId = null;
+
+
+ for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
+ if (event == XMLStreamConstants.START_ELEMENT) {
+ if (xmlr.getLocalName().equals("labl")) {
+ // this is the file name:
+ fmdDTO.setLabel( parseText(xmlr) );
+ // TODO: in DVN3 we used to make an attempt to determine the file type
+ // based on the file name.
+ } else if (xmlr.getLocalName().equals("txt")) {
+ fmdDTO.setDescription( parseText(xmlr) );
+ } else if (xmlr.getLocalName().equals("notes")) {
+ String noteType = xmlr.getAttributeValue(null, "type");
+ if ("vdc:category".equalsIgnoreCase(noteType) ) {
+ catName = parseText(xmlr);
+ } else if ("icpsr:category".equalsIgnoreCase(noteType) ) {
+ String subjectType = xmlr.getAttributeValue(null, "subject");
+ if ("description".equalsIgnoreCase(subjectType)) {
+ icpsrDesc = parseText(xmlr);
+ } else if ("id".equalsIgnoreCase(subjectType)) {
+ icpsrId = parseText(xmlr);
+ }
+ } else if (NOTE_TYPE_CONTENTTYPE.equalsIgnoreCase(noteType)) {
+ String contentType = parseText(xmlr);
+ if (!StringUtil.isEmpty(contentType)) {
+ dfDTO.setContentType(contentType);
+ }
+ }
+ }
+ } else if (event == XMLStreamConstants.END_ELEMENT) {//
+ if (xmlr.getLocalName().equals("otherMat")) {
+ // post process
+ if (fmdDTO.getLabel() == null || fmdDTO.getLabel().trim().equals("") ) {
+ fmdDTO.setLabel("harvested file");
+ }
+
+ // TODO: categories:
+ return;
+ }
+ }
+ }
+ }
private void processFileDscr(XMLStreamReader xmlr, DatasetDTO datasetDTO, Map filesMap) throws XMLStreamException {
FileMetadataDTO fmdDTO = new FileMetadataDTO();
@@ -1544,7 +1628,7 @@ private void processFileDscr(XMLStreamReader xmlr, DatasetDTO datasetDTO, Map fi
datasetDTO.getDataFiles().add(dfDTO);
// EMK TODO: ask Gustavo about this property
- // dfDTO.setFileSystemLocation( xmlr.getAttributeValue(null, "URI"));
+ //dfDTO.setFileSystemLocation( xmlr.getAttributeValue(null, "URI"));
String ddiFileId = xmlr.getAttributeValue(null, "ID");
/// the following Strings are used to determine the category
@@ -1574,7 +1658,7 @@ else if (xmlr.getLocalName().equals("notes")) {
} else if ("id".equalsIgnoreCase(subjectType)) {
icpsrId = parseText(xmlr);
}
- }
+ }
}
} else if (event == XMLStreamConstants.END_ELEMENT) {//
if (xmlr.getLocalName().equals("fileDscr")) {
@@ -1631,22 +1715,30 @@ private String processFileTxt(XMLStreamReader xmlr, FileMetadataDTO fmdDTO, Data
fmdDTO.setLabel( parseText(xmlr) );
/*sf.setFileType( FileUtil.determineFileType( fmdDTO.getLabel() ) );*/
+ } else if (xmlr.getLocalName().equals("fileType")) {
+ String contentType = parseText(xmlr);
+ if (!StringUtil.isEmpty(contentType)) {
+ dfDTO.setContentType(contentType);
+ }
} else if (xmlr.getLocalName().equals("fileCont")) {
fmdDTO.setDescription( parseText(xmlr) );
} else if (xmlr.getLocalName().equals("dimensns")) processDimensns(xmlr, dtDTO);
} else if (event == XMLStreamConstants.END_ELEMENT) {
if (xmlr.getLocalName().equals("fileTxt")) {
- // Now is the good time to determine the type of this subsettable
- // file (now that the "" section has been parsed, we
- // should know whether it's a tab, or a fixed field:
- String subsettableFileType = "application/octet-stream"; // better this than nothing!
- if ( dtDTO.getRecordsPerCase() != null ) {
- subsettableFileType="text/x-fixed-field";
- } else {
- subsettableFileType="text/tab-separated-values";
- }
+ // If we still don't know the content type of this file
+ // (i.e., if there was no "" tag explicitly specifying
+ // the type), we can try and make an educated guess. We already
+ // now that this is a subsettable file. And now that the
+ // "" section has been parsed, we can further
+ // decide if it's a tab, or a fixed field:
+ if (StringUtil.isEmpty(dfDTO.getContentType())) {
+ String subsettableFileType = "text/tab-separated-values";
+ if (dtDTO.getRecordsPerCase() != null) {
+ subsettableFileType = "text/x-fixed-field";
+ }
+ }
//EMK TODO: ask Gustavo & Leonid what should be used here instead of setFileType
- // dfDTO.setFileType( subsettableFileType );
+ // dfDTO.setFileType( subsettableFileType );
return ddiFileId;
}
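For clarity, the content-type fallback in the fileTxt handling above amounts to the following standalone sketch (the real code works on dfDTO and dtDTO rather than plain arguments, and the guessed value is currently left unused pending the setFileType question in the TODO):

    // Prefer an explicitly declared <fileType>; otherwise guess from the parsed
    // <dataDscr>: records-per-case present means fixed-field, else tab-separated.
    static String guessSubsettableContentType(String declaredContentType, Long recordsPerCase) {
        if (declaredContentType != null && !declaredContentType.isEmpty()) {
            return declaredContentType;
        }
        return (recordsPerCase != null) ? "text/x-fixed-field" : "text/tab-separated-values";
    }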
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
index 0c578177eb5..090d92be13a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
@@ -2,6 +2,7 @@
import com.google.gson.Gson;
import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
+import edu.harvard.iq.dataverse.DatasetFieldConstant;
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetFieldType;
import edu.harvard.iq.dataverse.DatasetVersion;
@@ -65,7 +66,10 @@ public class ImportGenericServiceBean {
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
- ForeignMetadataFormatMapping findFormatMappingByName (String name) {
+
+ public static String DCTERMS = "http://purl.org/dc/terms/";
+
+ public ForeignMetadataFormatMapping findFormatMappingByName (String name) {
try {
return em.createNamedQuery("ForeignMetadataFormatMapping.findByName", ForeignMetadataFormatMapping.class)
.setParameter("name", name)
@@ -175,6 +179,59 @@ public DatasetDTO processXML( XMLStreamReader xmlr, ForeignMetadataFormatMapping
}
+ // Helper method for importing harvested Dublin Core xml.
+ // Dublin Core is considered a mandatory, built in metadata format mapping.
+ // It is distributed as required content, in reference_data.sql.
+ // Note that arbitrary formatting tags are supported for the outer xml
+ // wrapper. -- L.A. 4.5
+ public DatasetDTO processOAIDCxml(String DcXmlToParse) throws XMLStreamException {
+ // look up DC metadata mapping:
+
+ ForeignMetadataFormatMapping dublinCoreMapping = findFormatMappingByName(DCTERMS);
+ if (dublinCoreMapping == null) {
+ throw new EJBException("Failed to find metadata mapping for " + DCTERMS);
+ }
+
+ DatasetDTO datasetDTO = this.initializeDataset();
+ StringReader reader = null;
+ XMLStreamReader xmlr = null;
+
+ try {
+ reader = new StringReader(DcXmlToParse);
+ XMLInputFactory xmlFactory = javax.xml.stream.XMLInputFactory.newInstance();
+ xmlr = xmlFactory.createXMLStreamReader(reader);
+
+ //while (xmlr.next() == XMLStreamConstants.COMMENT); // skip pre root comments
+ xmlr.nextTag();
+
+ xmlr.require(XMLStreamConstants.START_ELEMENT, null, OAI_DC_OPENING_TAG);
+
+ processXMLElement(xmlr, ":", OAI_DC_OPENING_TAG, dublinCoreMapping, datasetDTO);
+ } catch (XMLStreamException ex) {
+ throw new EJBException("ERROR occurred while parsing XML fragment (" + DcXmlToParse.substring(0, 64) + "...); ", ex);
+ }
+
+
+ datasetDTO.getDatasetVersion().setVersionState(DatasetVersion.VersionState.RELEASED);
+
+ // Our DC import handles the contents of the dc:identifier field
+ // as an "other id". In the context of OAI harvesting, we expect
+ // the identifier to be a global id, so we need to rearrange that:
+
+ String identifier = getOtherIdFromDTO(datasetDTO.getDatasetVersion());
+ logger.fine("Imported identifier: "+identifier);
+
+ String globalIdentifier = reassignIdentifierAsGlobalId(identifier, datasetDTO);
+ logger.fine("Detected global identifier: "+globalIdentifier);
+
+ if (globalIdentifier == null) {
+ throw new EJBException("Failed to find a global identifier in the OAI_DC XML record.");
+ }
+
+ return datasetDTO;
+
+ }
+
private void processXMLElement(XMLStreamReader xmlr, String currentPath, String openingTag, ForeignMetadataFormatMapping foreignFormatMapping, DatasetDTO datasetDTO) throws XMLStreamException {
logger.fine("entering processXMLElement; ("+currentPath+")");
@@ -239,23 +296,26 @@ private void processXMLElement(XMLStreamReader xmlr, String currentPath, String
MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get(mappingDefinedFieldType.getMetadataBlock().getName());
citationBlock.addField(value);
}
- } else {
-
- // Process the payload of this XML element:
- //xxString dataverseFieldName = mappingDefined.getDatasetfieldName();
- if (dataverseFieldName != null && !dataverseFieldName.equals("")) {
- FieldDTO value = FieldDTO.createPrimitiveFieldDTO(dataverseFieldName, parseText(xmlr));
- DatasetFieldType dataverseFieldType = datasetfieldService.findByNameOpt(dataverseFieldName);
- if (dataverseFieldType != null) {
- value = makeDTO(dataverseFieldType, value, dataverseFieldName);
-// value = FieldDTO.createPrimitiveFieldDTO(dataverseFieldName, parseText(xmlr));
-// FieldDTO dataverseField = FieldDTO.createCompoundFieldDTO(dataverseFieldName, value);
- MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get(mappingDefinedFieldType.getMetadataBlock().getName());
- citationBlock.addField(value);
-// TO DO replace database output with Json createDatasetFieldValue(dataverseFieldType, cachedCompoundValue, elementTextPayload, datasetVersion);
+ } else // Process the payload of this XML element:
+ //xxString dataverseFieldName = mappingDefined.getDatasetfieldName();
+ if (dataverseFieldName != null && !dataverseFieldName.equals("")) {
+ DatasetFieldType dataverseFieldType = datasetfieldService.findByNameOpt(dataverseFieldName);
+ FieldDTO value;
+ if (dataverseFieldType != null) {
+
+ if (dataverseFieldType.isControlledVocabulary()) {
+ value = FieldDTO.createVocabFieldDTO(dataverseFieldName, parseText(xmlr));
} else {
- throw new EJBException("Bad foreign metadata field mapping: no such DatasetField " + dataverseFieldName + "!");
+ value = FieldDTO.createPrimitiveFieldDTO(dataverseFieldName, parseText(xmlr));
}
+ value = makeDTO(dataverseFieldType, value, dataverseFieldName);
+ // value = FieldDTO.createPrimitiveFieldDTO(dataverseFieldName, parseText(xmlr));
+ // FieldDTO dataverseField = FieldDTO.createCompoundFieldDTO(dataverseFieldName, value);
+ MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get(mappingDefinedFieldType.getMetadataBlock().getName());
+ citationBlock.addField(value);
+ // TO DO replace database output with Json createDatasetFieldValue(dataverseFieldType, cachedCompoundValue, elementTextPayload, datasetVersion);
+ } else {
+ throw new EJBException("Bad foreign metadata field mapping: no such DatasetField " + dataverseFieldName + "!");
}
}
} else {
@@ -271,10 +331,11 @@ private void processXMLElement(XMLStreamReader xmlr, String currentPath, String
private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, String dataverseFieldName) {
if (dataverseFieldType.isAllowMultiples()){
- if(dataverseFieldType.isCompound()){
+ if(dataverseFieldType.isCompound()) {
value = FieldDTO.createMultipleCompoundFieldDTO(dataverseFieldName, value);
- }
- else {
+ } else if (dataverseFieldType.isControlledVocabulary()) {
+ value = FieldDTO.createMultipleVocabFieldDTO(dataverseFieldName, Arrays.asList(value.getSinglePrimitive()));
+ } else {
value = FieldDTO.createMultiplePrimitiveFieldDTO(dataverseFieldName, Arrays.asList(value.getSinglePrimitive()));
}
if (dataverseFieldType.isChild()) {
@@ -289,6 +350,10 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St
value = FieldDTO.createCompoundFieldDTO(dataverseFieldName, value);
}
}
+
+ // TODO:
+ // it looks like the code below has already been executed, in one of the
+ // if () blocks above... is this ok to be doing it again?? -- L.A. 4.5
if (dataverseFieldType.isChild()) {
DatasetFieldType parentDatasetFieldType = dataverseFieldType.getParentDatasetFieldType();
if (parentDatasetFieldType.isAllowMultiples()) {
@@ -299,7 +364,70 @@ private FieldDTO makeDTO(DatasetFieldType dataverseFieldType, FieldDTO value, St
return value;
}
+ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) {
+ for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.otherId.equals(fieldDTO.getTypeName())) {
+ String otherId = "";
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.otherIdValue.equals(next.getTypeName())) {
+ otherId = next.getSinglePrimitive();
+ }
+ }
+ if (!otherId.isEmpty()){
+ return otherId;
+ }
+ }
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ private String reassignIdentifierAsGlobalId(String identifierString, DatasetDTO datasetDTO) {
+
+ int index1 = identifierString.indexOf(':');
+ int index2 = identifierString.lastIndexOf('/');
+ if (index1==-1) {
+ logger.warning("Error parsing identifier: " + identifierString + ". ':' not found in string");
+ return null;
+ }
+ String protocol = identifierString.substring(0, index1);
+
+ if (!"doi".equals(protocol) && !"hdl".equals(protocol)) {
+ logger.warning("Unsupported protocol: "+identifierString);
+ return null;
+ }
+
+
+ if (index2 == -1) {
+ logger.warning("Error parsing identifier: " + identifierString + ". Second separator not found in string");
+ return null;
+ }
+
+ String authority = identifierString.substring(index1+1, index2);
+ String identifier = identifierString.substring(index2+1);
+
+ datasetDTO.setProtocol(protocol);
+ datasetDTO.setDoiSeparator("/");
+ datasetDTO.setAuthority(authority);
+ datasetDTO.setIdentifier(identifier);
+
+ // reassemble and return:
+ return protocol + ":" + authority + "/" + identifier;
+ }
+
+
+ public static final String OAI_DC_OPENING_TAG = "dc";
+ public static final String DCTERMS_OPENING_TAG = "dcterms";
+
public static final String SOURCE_DVN_3_0 = "DVN_3_0";
public static final String NAMING_PROTOCOL_HANDLE = "hdl";
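To make the identifier handling above concrete: a dc:identifier such as "doi:10.5072/FK2/ABCDEF" (an illustrative value) is split at the first ':' and the last '/', yielding protocol "doi", authority "10.5072/FK2" and identifier "ABCDEF", which are then written back onto the DatasetDTO and reassembled as the global id. A standalone sketch of the same parsing, with a made-up class name:

    public class GlobalIdParseSketch {
        public static void main(String[] args) {
            String identifierString = "doi:10.5072/FK2/ABCDEF";      // illustrative only
            int index1 = identifierString.indexOf(':');
            int index2 = identifierString.lastIndexOf('/');
            String protocol = identifierString.substring(0, index1);           // "doi"
            String authority = identifierString.substring(index1 + 1, index2); // "10.5072/FK2"
            String identifier = identifierString.substring(index2 + 1);        // "ABCDEF"
            System.out.println(protocol + ":" + authority + "/" + identifier); // doi:10.5072/FK2/ABCDEF
        }
    }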
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
index 629032843eb..4c5865d560e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
@@ -7,6 +7,7 @@
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
+import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetField;
import edu.harvard.iq.dataverse.DatasetFieldConstant;
@@ -19,6 +20,7 @@
import edu.harvard.iq.dataverse.DataverseContact;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.ForeignMetadataFormatMapping;
import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType;
@@ -30,6 +32,8 @@
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.json.JsonParseException;
import edu.harvard.iq.dataverse.util.json.JsonParser;
@@ -49,11 +53,14 @@
import javax.ejb.EJBException;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
+import javax.ejb.TransactionAttributeType;
import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.json.JsonReader;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
import javax.servlet.http.HttpServletRequest;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
@@ -71,6 +78,9 @@
*/
@Stateless
public class ImportServiceBean {
+ @PersistenceContext(unitName="VDCNet-ejbPU")
+ private EntityManager em;
+
private static final Logger logger = Logger.getLogger(ImportServiceBean.class.getCanonicalName());
@EJB
@@ -87,8 +97,13 @@ public class ImportServiceBean {
@EJB
SettingsServiceBean settingsService;
- @EJB ImportDDIServiceBean importDDIService;
+ @EJB
+ ImportDDIServiceBean importDDIService;
+ @EJB
+ ImportGenericServiceBean importGenericService;
+ @EJB
+ IndexServiceBean indexService;
/**
* This is just a convenience method, for testing migration. It creates
* a dummy dataverse with the directory name as dataverse name & alias.
@@ -185,6 +200,187 @@ public JsonObjectBuilder handleFile(DataverseRequest dataverseRequest, Dataverse
}
}
+ @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog) throws ImportException, IOException {
+ if (harvestingClient == null || harvestingClient.getDataverse() == null) {
+ throw new ImportException("importHarvestedDataset called wiht a null harvestingClient, or an invalid harvestingClient.");
+ }
+ Dataverse owner = harvestingClient.getDataverse();
+ Dataset importedDataset = null;
+
+ DatasetDTO dsDTO = null;
+ String json = null;
+
+ // TODO:
+ // At the moment (4.5; the first official "export/harvest release"), there
+ // are 3 supported metadata formats: DDI, DC and native Dataverse metadata
+ // encoded in JSON. The 2 XML formats are handled by custom implementations;
+ // each of the 2 implementations uses its own parsing approach (see the
+ // ImportDDIServiceBean and ImportGenericServiceBean for details).
+ // TODO: Need to create a system of standardized import plugins - similar to Stephen
+ // Kraffmiller's export modules; replace the logic below with clean
+ // programmatic lookup of the import plugin needed.
+
+ if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat)) {
+ try {
+ String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
+ // TODO:
+ // import type should be configurable - it should be possible to
+ // select whether you want to harvest with or without files,
+ // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
+ logger.fine("importing DDI "+metadataFile.getAbsolutePath());
+ dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
+ } catch (XMLStreamException e) {
+ throw new ImportException("XMLStreamException" + e);
+ }
+ } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
+ logger.fine("importing DC "+metadataFile.getAbsolutePath());
+ try {
+ String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
+ dsDTO = importGenericService.processOAIDCxml(xmlToParse);
+ } catch (XMLStreamException e) {
+ throw new ImportException("XMLStreamException processing Dublin Core XML record: "+e.getMessage());
+ }
+ } else if ("dataverse_json".equals(metadataFormat)) {
+ // This is Dataverse metadata already formatted in JSON.
+ // Simply read it into a string, and pass to the final import further down:
+ logger.fine("Attempting to import custom dataverse metadata from file "+metadataFile.getAbsolutePath());
+ json = new String(Files.readAllBytes(metadataFile.toPath()));
+ } else {
+ throw new ImportException("Unsupported import metadata format: " + metadataFormat);
+ }
+
+ if (json == null) {
+ if (dsDTO != null ) {
+ // convert DTO to Json,
+ Gson gson = new GsonBuilder().setPrettyPrinting().create();
+ json = gson.toJson(dsDTO);
+ logger.fine("JSON produced for the metadata harvested: "+json);
+ } else {
+ throw new ImportException("Failed to transform XML metadata format "+metadataFormat+" into a DatasetDTO");
+ }
+ }
+
+ JsonReader jsonReader = Json.createReader(new StringReader(json));
+ JsonObject obj = jsonReader.readObject();
+ //and call parse Json to read it into a dataset
+ try {
+ JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
+ parser.setLenient(true);
+ Dataset ds = parser.parseDataset(obj);
+
+ // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
+ // we support, it should be rejected.
+ // (TODO: ! - add some way of keeping track of supported protocols!)
+ //if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
+ // throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
+ //}
+ ds.setOwner(owner);
+ ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
+
+ // Check data against required constraints
+ List<ConstraintViolation> violations = ds.getVersions().get(0).validateRequired();
+ if (!violations.isEmpty()) {
+ // For migration and harvest, add NA for missing required values
+ for (ConstraintViolation v : violations) {
+ DatasetField f = ((DatasetField) v.getRootBean());
+ f.setSingleValue(DatasetField.NA_VALUE);
+ }
+ }
+
+ // Check data against validation constraints
+ // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
+ // if the fix fails stop processing of this file by throwing exception
+ Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
+ ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
+ Validator validator = factory.getValidator();
+ if (!invalidViolations.isEmpty()) {
+ for (ConstraintViolation v : invalidViolations) {
+ DatasetFieldValue f = ((DatasetFieldValue) v.getRootBean());
+ boolean fixed = false;
+ boolean converted = false;
+ // TODO: Is this scrubbing something we want to continue doing?
+ if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
+ fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
+ converted = true;
+ if (fixed) {
+ Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
+ if (!scrubbedViolations.isEmpty()) {
+ fixed = false;
+ }
+ }
+ }
+ if (!fixed) {
+ String msg = "Data modified - File: " + metadataFile.getName() + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; "
+ + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
+ cleanupLog.println(msg);
+ f.setValue(DatasetField.NA_VALUE);
+
+ }
+ }
+ }
+
+ // A Global ID is required, in order for us to be able to harvest and import
+ // this dataset:
+ if (StringUtils.isEmpty(ds.getGlobalId())) {
+ throw new ImportException("The harvested metadata record with the OAI server identifier "+harvestIdentifier+" does not contain a global unique identifier that we could recognize, skipping.");
+ }
+
+ ds.setHarvestedFrom(harvestingClient);
+ ds.setHarvestIdentifier(harvestIdentifier);
+
+ Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
+
+ if (existingDs != null) {
+ // If this dataset already exists IN ANOTHER DATAVERSE
+ // we are just going to skip it!
+ if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
+ throw new ImportException("The dataset with the global id "+ds.getGlobalId()+" already exists, in the dataverse "+existingDs.getOwner().getAlias()+", skipping.");
+ }
+ // And if we already have a dataset with this same id, in this same
+ // dataverse, but it is a LOCAL dataset (can happen!), we're going to
+ // skip it also:
+ if (!existingDs.isHarvested()) {
+ throw new ImportException("A LOCAL dataset with the global id "+ds.getGlobalId()+" already exists in this dataverse; skipping.");
+ }
+ // For harvested datasets, there should always only be one version.
+ // We will replace the current version with the imported version.
+ if (existingDs.getVersions().size() != 1) {
+ throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
+ }
+ // Purge all the SOLR documents associated with this dataset from the
+ // index server:
+ indexService.deleteHarvestedDocuments(existingDs);
+ // files from harvested datasets are removed unceremoniously,
+ // directly in the database. no need to bother calling the
+ // DeleteFileCommand on them.
+ for (DataFile harvestedFile : existingDs.getFiles()) {
+ DataFile merged = em.merge(harvestedFile);
+ em.remove(merged);
+ harvestedFile = null;
+ }
+ // TODO:
+ // Verify what happens with the indexed files in SOLR?
+ // are they going to be overwritten by the reindexing of the dataset?
+ existingDs.setFiles(null);
+ Dataset merged = em.merge(existingDs);
+ engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
+ importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
+
+ } else {
+ importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
+ }
+
+ } catch (JsonParseException ex) {
+ logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
+ throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
+ } catch (CommandException ex) {
+ logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage());
+ throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex);
+ }
+ return importedDataset;
+ }
+
public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
String status = "";
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java
index 0ce08e019fc..c1165bbb7f1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java
@@ -10,6 +10,6 @@
* @author ellenk
*/
public interface ImportUtil {
- public enum ImportType{ NEW, MIGRATION, HARVEST};
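+ // HARVEST_WITH_FILES: harvest the dataset metadata together with its
+ // file-level metadata (used by doImportHarvestedDataset for DDI records).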
+ public enum ImportType{ NEW, MIGRATION, HARVEST, HARVEST_WITH_FILES};
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
index 5f2e274a330..af826e622d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
@@ -18,6 +18,8 @@
import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
+import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Calendar;
@@ -72,6 +74,9 @@ public class AuthenticationServiceBean {
@EJB
UserNotificationServiceBean userNotificationService;
+ @EJB
+ PasswordResetServiceBean passwordResetServiceBean;
+
@PersistenceContext(unitName = "VDCNet-ejbPU")
private EntityManager em;
@@ -244,6 +249,16 @@ public AuthenticatedUser getAuthenticatedUser( String identifier ) {
return null;
}
}
+
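+ /**
+ * @return The superuser with the lowest id, looked up via the
+ * AuthenticatedUser.findAdminUser named query; null if there is no
+ * superuser or the lookup fails.
+ */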
+ public AuthenticatedUser getAdminUser() {
+ try {
+ return em.createNamedQuery("AuthenticatedUser.findAdminUser", AuthenticatedUser.class)
+ .setMaxResults(1)
+ .getSingleResult();
+ } catch (Exception ex) {
+ return null;
+ }
+ }
public AuthenticatedUser getAuthenticatedUserByEmail( String email ) {
try {
@@ -503,6 +518,11 @@ public AuthenticatedUser convertBuiltInToShib(AuthenticatedUser builtInUserToCon
String builtinUsername = builtInUserIdentifier.replaceFirst(AuthenticatedUser.IDENTIFIER_PREFIX, "");
BuiltinUser builtin = builtinUserServiceBean.findByUserName(builtinUsername);
if (builtin != null) {
+ // These were created by AuthenticationResponse.Status.BREAKOUT in ShibServiceBean.canLogInAsBuiltinUser
+ List<PasswordResetData> oldTokens = passwordResetServiceBean.findPasswordResetDataByDataverseUser(builtin);
+ for (PasswordResetData oldToken : oldTokens) {
+ em.remove(oldToken);
+ }
em.remove(builtin);
} else {
logger.info("Couldn't delete builtin user because could find it based on username " + builtinUsername);
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
index cbb5a7149ac..0f36898f0e2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
@@ -53,9 +53,15 @@ public class DataverseRole implements Serializable {
public static final String FULL_CONTRIBUTOR = "fullContributor";
public static final String DV_CONTRIBUTOR = "dvContributor";
public static final String DS_CONTRIBUTOR = "dsContributor";
+ /**
+ * Heads up that this says "editor" which comes from
+ * scripts/api/data/role-editor.json but the name is "Contributor". The
+ * *alias* is "editor". Don't be fooled!
+ */
public static final String EDITOR = "editor";
public static final String MANAGER = "manager";
public static final String CURATOR = "curator";
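+ /**
+ * The alias of the built-in read-only role ("member"); assigned, for
+ * example, to the PrivateUrlUser when a Private URL is created.
+ */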
+ public static final String MEMBER = "member";
public static final Comparator<DataverseRole> CMP_BY_NAME = new Comparator<DataverseRole>(){
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
index 55af770edc3..5f494757d66 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
@@ -55,6 +55,7 @@ public void setup() {
addGroupProvider( ipGroupProvider = new IpGroupProvider(ipGroupsService) );
addGroupProvider( shibGroupProvider = new ShibGroupProvider(shibGroupService) );
addGroupProvider( explicitGroupProvider = explicitGroupService.getProvider() );
+ Logger.getLogger(GroupServiceBean.class.getName()).log(Level.INFO, "PostConstruct group service call");
}
public Group getGroup( String groupAlias ) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtil.java
new file mode 100644
index 00000000000..92e181264e7
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtil.java
@@ -0,0 +1,29 @@
+package edu.harvard.iq.dataverse.authorization.groups;
+
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import java.util.Set;
+
+public class GroupUtil {
+
+ /**
+ * @param authenticatedUser A non-null AuthenticatedUser.
+ * @param groups The groups associated with an AuthenticatedUser.
+ * @return A list of identifiers for the user including groups, single
+ * quoted, and separated by commas. Null if a null AuthenticatedUser is
+ * passed.
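+ * (For example, with illustrative identifiers: "'@jdoe', '&someGroup'".)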
+ */
+ public static String getAllIdentifiersForUser(AuthenticatedUser authenticatedUser, Set<Group> groups) {
+ if (authenticatedUser == null) {
+ return null;
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append("'").append(authenticatedUser.getIdentifier()).append("'");
+ if (groups != null) {
+ groups.stream().forEach((group) -> {
+ sb.append(", ").append("'").append(group.getIdentifier()).append("'");
+ });
+ }
+ return sb.toString();
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
index 5ca62f45bf0..3b6348737ed 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
@@ -30,7 +30,7 @@ public class IPv4Range extends IpAddressRange implements java.io.Serializable {
@GeneratedValue
Long id;
- /** The most significant bits of {@code this} range's top addre, i.e the first two numbers of the IP address */
+ /** The most significant bits of {@code this} range's top address, i.e the first two numbers of the IP address */
long topAsLong;
/** The least significant bits, i.e the last tow numbers of the IP address */
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
index 257c42f3a5a..246ec5856f8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
@@ -8,11 +8,13 @@
import com.google.gson.JsonSyntaxException;
import edu.harvard.iq.dataverse.Shib;
import edu.harvard.iq.dataverse.authorization.AuthenticationRequest;
+import edu.harvard.iq.dataverse.authorization.AuthenticationResponse;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
+import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption;
import static edu.harvard.iq.dataverse.authorization.providers.shib.ShibUtil.getRandomUserStatic;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -188,6 +190,30 @@ public AuthenticatedUser canLogInAsBuiltinUser(String username, String password)
return au;
} catch (AuthenticationFailedException ex) {
logger.info("The username and/or password entered is invalid: " + ex.getResponse().getMessage());
+ if (AuthenticationResponse.Status.BREAKOUT.equals(ex.getResponse().getStatus())) {
+ /**
+ * Note that this "BREAKOUT" status creates PasswordResetData!
+ * We'll delete it just before blowing away the BuiltinUser in
+ * AuthenticationServiceBean.convertBuiltInToShib
+ */
+ logger.info("AuthenticationFailedException caught in canLogInAsBuiltinUser: The username and/or password entered is invalid: " + ex.getResponse().getMessage() + " - Maybe the user (" + username + ") hasn't upgraded their password? Checking the old password...");
+ BuiltinUser builtinUser = builtinUserService.findByUsernameOrEmail(username);
+ if (builtinUser != null) {
+ boolean userAuthenticated = PasswordEncryption.getVersion(builtinUser.getPasswordEncryptionVersion()).check(password, builtinUser.getEncryptedPassword());
+ if (userAuthenticated == true) {
+ AuthenticatedUser authUser = authSvc.lookupUser(BuiltinAuthenticationProvider.PROVIDER_ID, builtinUser.getUserName());
+ if (authUser != null) {
+ return authUser;
+ } else {
+ logger.info("canLogInAsBuiltinUser: Couldn't find AuthenticatedUser based on BuiltinUser username " + builtinUser.getUserName());
+ }
+ } else {
+ logger.info("canLogInAsBuiltinUser: User doesn't know old pre-bcrypt password either.");
+ }
+ } else {
+ logger.info("canLogInAsBuiltinUser: Couldn't run `check` because no BuiltinUser found with username " + username);
+ }
+ }
return null;
} catch (EJBException ex) {
Throwable cause = ex;
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
index f609ed959e2..c8b07966f4e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
@@ -37,7 +37,11 @@
@NamedQuery( name="AuthenticatedUser.filter",
query="select au from AuthenticatedUser au WHERE ("
+ "au.userIdentifier like :query OR "
- + "lower(concat(au.firstName,' ',au.lastName)) like lower(:query))")
+ + "lower(concat(au.firstName,' ',au.lastName)) like lower(:query))"),
+ @NamedQuery( name="AuthenticatedUser.findAdminUser",
+ query="select au from AuthenticatedUser au WHERE "
+ + "au.superuser = true "
+ + "order by au.id")
})
@Entity
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java
new file mode 100644
index 00000000000..947468fab11
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java
@@ -0,0 +1,64 @@
+package edu.harvard.iq.dataverse.authorization.users;
+
+import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
+/**
+ * A PrivateUrlUser is virtual in the sense that it does not have a row in the
+ * authenticateduser table. It exists so when a Private URL is enabled for a
+ * dataset, we can assign a read-only role ("member") to the identifier for the
+ * PrivateUrlUser. (We will make no attempt to internationalize the identifier,
+ * which is stored in the roleassignment table.)
+ */
+public class PrivateUrlUser implements User {
+
+ public static final String PREFIX = "#";
+
+ /**
+ * In the future, this could probably be dvObjectId rather than datasetId,
+ * if necessary. It's really just roleAssignment.getDefinitionPoint(), which
+ * is a DvObject.
+ */
+ private final long datasetId;
+
+ public PrivateUrlUser(long datasetId) {
+ this.datasetId = datasetId;
+ }
+
+ public long getDatasetId() {
+ return datasetId;
+ }
+
+ /**
+ * @return By always returning false for isAuthenticated(), we prevent a
+ * name from appearing in the corner as well as preventing an account page
+ * and MyData from being accessible. The user can still navigate to the home
+ * page but can only see published datasets.
+ */
+ @Override
+ public boolean isAuthenticated() {
+ return false;
+ }
+
+ @Override
+ public boolean isBuiltInUser() {
+ return false;
+ }
+
+ @Override
+ public boolean isSuperuser() {
+ return false;
+ }
+
+ @Override
+ public String getIdentifier() {
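+ // e.g. "#42" for dataset id 42; this is the identifier stored in the roleassignment table.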
+ return PREFIX + datasetId;
+ }
+
+ @Override
+ public RoleAssigneeDisplayInfo getDisplayInfo() {
+ String title = BundleUtil.getStringFromBundle("dataset.privateurl.roleassigeeTitle");
+ return new RoleAssigneeDisplayInfo(title, null);
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java
index 3a5203d414a..29863361a30 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/User.java
@@ -4,18 +4,19 @@
import java.io.Serializable;
/**
- * A user of the dataverse system. Intuitively a single real person in real life, but
- * some corner cases exist (e.g. {@link GuestUser}, who stands for many people).
+ * A user of the dataverse system. Intuitively a single real person in real
+ * life, but some corner cases exist (e.g. {@link GuestUser}, who stands for
+ * many people, or {@link PrivateUrlUser}, another virtual user).
*/
public interface User extends RoleAssignee, Serializable {
public boolean isAuthenticated();
- // TODO remove this, should be handles in a more generic fashion,
+ // TODO remove this, should be handled in a more generic fashion,
// e.g. getUserProvider and get the provider's URL from there. This
// would allow Shib-based editing as well.
public boolean isBuiltInUser();
-
+
public boolean isSuperuser();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
index 2bf5e719c5f..daaa1ee0fff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
@@ -25,9 +25,12 @@
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
import edu.harvard.iq.dataverse.engine.DataverseEngine;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.SystemConfig;
import javax.persistence.EntityManager;
/**
@@ -37,62 +40,75 @@
* @author michael
*/
public interface CommandContext {
-
- public EntityManager em();
-
- public DataverseEngine engine();
-
- public DvObjectServiceBean dvObjects();
-
- public DatasetServiceBean datasets();
-
- public DataverseServiceBean dataverses();
-
- public DataverseRoleServiceBean roles();
-
- public BuiltinUserServiceBean builtinUsers();
-
- public IndexServiceBean index();
+
+ /**
+ * Note: While this method is not deprecated *yet*, please consider not
+ * using it, and using a method on the service bean instead. Using the em
+ * directly makes the command less testable.
+ *
+ * @return the entity manager
+ */
+ public EntityManager em();
+
+ public DataverseEngine engine();
+
+ public DvObjectServiceBean dvObjects();
+
+ public DatasetServiceBean datasets();
+
+ public DataverseServiceBean dataverses();
+
+ public DataverseRoleServiceBean roles();
+
+ public BuiltinUserServiceBean builtinUsers();
+
+ public IndexServiceBean index();
public SolrIndexServiceBean solrIndex();
-
- public SearchServiceBean search();
-
- public PermissionServiceBean permissions();
+
+ public SearchServiceBean search();
+ public IngestServiceBean ingest();
+
+ public PermissionServiceBean permissions();
+
public RoleAssigneeServiceBean roleAssignees();
-
- public DataverseFacetServiceBean facets();
-
- public FeaturedDataverseServiceBean featuredDataverses();
-
- public DataFileServiceBean files();
-
+
+ public DataverseFacetServiceBean facets();
+
+ public FeaturedDataverseServiceBean featuredDataverses();
+
+ public DataFileServiceBean files();
+
public TemplateServiceBean templates();
-
+
public SavedSearchServiceBean savedSearches();
-
+
public DataverseFieldTypeInputLevelServiceBean fieldTypeInputLevels();
-
+
public DOIEZIdServiceBean doiEZId();
-
+
public DOIDataCiteServiceBean doiDataCite();
-
+
public HandlenetServiceBean handleNet();
-
+
public GuestbookServiceBean guestbooks();
-
+
public GuestbookResponseServiceBean responses();
-
+
public DataverseLinkingServiceBean dvLinking();
-
+
public DatasetLinkingServiceBean dsLinking();
-
- public SettingsServiceBean settings();
-
+
+ public SettingsServiceBean settings();
+
public ExplicitGroupServiceBean explicitGroups();
-
+
public UserNotificationServiceBean notifications();
-
+
public AuthenticationServiceBean authentication();
+
+ public SystemConfig systemConfig();
+
+ public PrivateUrlServiceBean privateUrl();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
index f6624d03cc1..f1c628ae0f3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
@@ -2,6 +2,7 @@
import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
import edu.harvard.iq.dataverse.authorization.users.User;
+import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
@@ -17,7 +18,7 @@ public class DataverseRequest {
private final User user;
private final IpAddress sourceAddress;
private static final Logger logger = Logger.getLogger(DataverseRequest.class.getCanonicalName());
-
+
public DataverseRequest(User aUser, HttpServletRequest aHttpServletRequest) {
this.user = aUser;
String remoteAddressStr = null;
@@ -58,7 +59,6 @@ As of now (4.2.3), this is the ONLY situation where we check the remote
-- L.A. 4.2.3
*/
-
logger.fine("DataverseRequest: Obtained remote address: "+remoteAddressStr);
if ( remoteAddressStr == null ) {
@@ -67,6 +67,11 @@ As of now (4.2.3), this is the ONLY situation where we check the remote
sourceAddress = IpAddress.valueOf( remoteAddressStr );
}
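+ /**
+ * A constructor for callers that already have an IpAddress to associate
+ * with the request, rather than an HttpServletRequest.
+ */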
+ public DataverseRequest( User aUser, IpAddress aSourceAddress ) {
+ user = aUser;
+ sourceAddress = aSourceAddress;
+ }
+
public User getUser() {
return user;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java
index e02eb7d01be..767bee92619 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java
@@ -29,25 +29,28 @@ public class AssignRoleCommand extends AbstractCommand {
private final DataverseRole role;
private final RoleAssignee grantee;
private final DvObject defPoint;
+ private final String privateUrlToken;
/**
* @param anAssignee The user being granted the role
* @param aRole the role being granted to the user
* @param assignmentPoint the dataverse on which the role is granted.
* @param aRequest
+ * @param privateUrlToken An optional token used by the Private Url feature.
*/
- public AssignRoleCommand(RoleAssignee anAssignee, DataverseRole aRole, DvObject assignmentPoint, DataverseRequest aRequest) {
+ public AssignRoleCommand(RoleAssignee anAssignee, DataverseRole aRole, DvObject assignmentPoint, DataverseRequest aRequest, String privateUrlToken) {
// for data file check permission on owning dataset
super(aRequest, assignmentPoint instanceof DataFile ? assignmentPoint.getOwner() : assignmentPoint);
role = aRole;
grantee = anAssignee;
defPoint = assignmentPoint;
+ this.privateUrlToken = privateUrlToken;
}
@Override
public RoleAssignment execute(CommandContext ctxt) throws CommandException {
// TODO make sure the role is defined on the dataverse.
- RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint);
+ RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken);
return ctxt.roles().save(roleAssignment);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java
index 8684338bc7b..25d2231708a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java
@@ -175,7 +175,8 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
logger.log(Level.FINE, "after db update {0}", formatter.format(new Date().getTime()));
// set the role to be default contributor role for its dataverse
if (importType==null || importType.equals(ImportType.NEW)) {
- ctxt.roles().save(new RoleAssignment(savedDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), savedDataset));
+ String privateUrlToken = null;
+ ctxt.roles().save(new RoleAssignment(savedDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), savedDataset, privateUrlToken));
}
savedDataset.setPermissionModificationTime(new Timestamp(new Date().getTime()));
@@ -191,11 +192,27 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
*/
boolean doNormalSolrDocCleanUp = true;
ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp);
-
- } catch ( RuntimeException e ) {
- logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage(), e);
+
+ } catch ( Exception e ) { // RuntimeException e ) {
+ logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage()); //, e);
+ /**
+ * Even though the original intention appears to have been to allow the
+ * dataset to be successfully created, even if an exception is thrown during
+ * the indexing - in reality, a runtime exception there, even caught,
+ * still forces the EJB transaction to be rolled back; hence the
+ * dataset is NOT created... but the command completes and exits as if
+ * it has been successful.
+ * So I am going to throw a Command Exception here, to avoid this.
+ * If we DO want to be able to create datasets even if they cannot
+ * be immediately indexed, we'll have to figure out how to do that.
+ * (Note that import is still possible when Solr is down, because indexDataset()
+ * does NOT throw an exception in that case.)
+ * -- L.A. 4.5
+ */
+ throw new CommandException("Dataset could not be created. Indexing failed", this);
+
}
- logger.log(Level.FINE, "after index {0}", formatter.format(new Date().getTime()));
+ logger.log(Level.FINE, "after index {0}", formatter.format(new Date().getTime()));
// if we are not migrating, assign the user to this version
if (importType==null || importType.equals(ImportType.NEW)) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
index 8742a35679c..060da0565ca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
@@ -51,6 +51,7 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
throw new IllegalCommandException("Latest version is already a draft. Cannot add another draft", this);
}
}
+ newVersion.setDataset(dataset);
newVersion.setDatasetFields(newVersion.initDatasetFields());
Set<ConstraintViolation> constraintViolations = newVersion.validate();
@@ -78,7 +79,6 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
FileMetadata fmdCopy = fmd.createCopy();
fmdCopy.setDatasetVersion(newVersion);
newVersionMetadatum.add( fmdCopy );
- logger.info( "added file metadata " + fmdCopy );
}
newVersion.setFileMetadatas(newVersionMetadatum);
@@ -88,12 +88,16 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
newVersion.setLastUpdateTime(now);
dataset.setModificationTime(now);
newVersion.setDataset(dataset);
- ctxt.em().persist(newVersion);
-
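+ // Work on a copy of the dataset's version list, placing the new draft in
+ // the latest-version slot, and let the dataset service persist it instead
+ // of calling em.persist() directly: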
+ final List<DatasetVersion> currentVersions = dataset.getVersions();
+ ArrayList<DatasetVersion> dsvs = new ArrayList<>(currentVersions.size());
+ dsvs.addAll(currentVersions);
+ dsvs.set(0, newVersion);
+ dataset.setVersions( dsvs );
+
// TODO make async
- // ctxt.index().indexDataset(dataset);
+ // ctxt.index().indexDataset(dataset);
+ return ctxt.datasets().storeVersion(newVersion);
- return newVersion;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
index c3234d2e1a8..c64995a6958 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
@@ -7,6 +7,7 @@
import edu.harvard.iq.dataverse.RoleAssignment;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -52,7 +53,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
if (created.getOwner() == null) {
if (ctxt.dataverses().isRootDataverseExists()) {
- throw new CommandException("Root Dataverse already exists. Cannot create another one", this);
+ throw new IllegalCommandException("Root Dataverse already exists. Cannot create another one", this);
}
}
@@ -61,7 +62,12 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
}
if (created.getCreator() == null) {
- created.setCreator((AuthenticatedUser) getRequest().getUser());
+ final User user = getRequest().getUser();
+ if ( user.isAuthenticated() ) {
+ created.setCreator((AuthenticatedUser) user);
+ } else {
+ throw new IllegalCommandException("Guest users cannot create a Dataverse.", this);
+ }
}
if (created.getDataverseType() == null) {
@@ -86,7 +92,8 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
// Find the built in admin role (currently by alias)
DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN);
- ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv));
+ String privateUrlToken = null;
+ ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken));
managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime()));
managedDv = ctxt.dataverses().save(managedDv);
@@ -96,12 +103,11 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
ctxt.facets().deleteFacetsFor(managedDv);
int i = 0;
for (DatasetFieldType df : facetList) {
- ctxt.facets().create(i++, df.getId(), managedDv.getId());
+ ctxt.facets().create(i++, df, managedDv);
}
}
if (inputLevelList != null) {
-
ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv);
for (DataverseFieldTypeInputLevel obj : inputLevelList) {
obj.setDataverse(managedDv);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestingClientCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestingClientCommand.java
new file mode 100644
index 00000000000..bf06fbd23b4
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestingClientCommand.java
@@ -0,0 +1,40 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@RequiredPermissions( Permission.EditDataverse )
+public class CreateHarvestingClientCommand extends AbstractCommand<HarvestingClient> {
+
+ private final Dataverse dv;
+ private final HarvestingClient harvestingClient;
+
+ public CreateHarvestingClientCommand(DataverseRequest aRequest, HarvestingClient harvestingClient) {
+ super(aRequest, harvestingClient.getDataverse());
+ this.harvestingClient = harvestingClient;
+ dv = harvestingClient.getDataverse();
+ }
+
+ @Override
+ public HarvestingClient execute(CommandContext ctxt) throws CommandException {
+ // TODO: check if the harvesting client config is legit;
+ // and that it is indeed new and unique?
+ // (may not be necessary - as the uniqueness should be enforced by
+ // the persistence layer... - but could still be helpful to have a dedicated
+ // custom exception for "nickname already taken". see CreateExplicitGroupCommand
+ // for an example. -- L.A. 4.4)
+
+ return ctxt.em().merge(this.harvestingClient);
+ }
+
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommand.java
new file mode 100644
index 00000000000..cc1adbc984a
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommand.java
@@ -0,0 +1,68 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.RoleAssignment;
+import edu.harvard.iq.dataverse.authorization.DataverseRole;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import java.util.UUID;
+import java.util.logging.Logger;
+
+@RequiredPermissions(Permission.ManageDatasetPermissions)
+public class CreatePrivateUrlCommand extends AbstractCommand<PrivateUrl> {
+
+ private static final Logger logger = Logger.getLogger(CreatePrivateUrlCommand.class.getCanonicalName());
+
+ final Dataset dataset;
+
+ public CreatePrivateUrlCommand(DataverseRequest dataverseRequest, Dataset theDataset) {
+ super(dataverseRequest, theDataset);
+ dataset = theDataset;
+ }
+
+ @Override
+ public PrivateUrl execute(CommandContext ctxt) throws CommandException {
+ logger.fine("Executing CreatePrivateUrlCommand...");
+ if (dataset == null) {
+ /**
+ * @todo Internationalize this.
+ */
+ String message = "Can't create Private URL. Dataset is null.";
+ logger.info(message);
+ throw new IllegalCommandException(message, this);
+ }
+ PrivateUrl existing = ctxt.privateUrl().getPrivateUrlFromDatasetId(dataset.getId());
+ if (existing != null) {
+ /**
+ * @todo Internationalize this.
+ */
+ String message = "Private URL already exists for dataset id " + dataset.getId() + ".";
+ logger.info(message);
+ throw new IllegalCommandException(message, this);
+ }
+ DatasetVersion latestVersion = dataset.getLatestVersion();
+ if (!latestVersion.isDraft()) {
+ /**
+ * @todo Internationalize this.
+ */
+ String message = "Can't create Private URL because the latest version of dataset id " + dataset.getId() + " is not a draft.";
+ logger.info(message);
+ throw new IllegalCommandException(message, this);
+ }
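+ // Assign the read-only "member" role to the virtual PrivateUrlUser for
+ // this dataset, tagging the assignment with a freshly generated token: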
+ PrivateUrlUser privateUrlUser = new PrivateUrlUser(dataset.getId());
+ DataverseRole memberRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.MEMBER);
+ final String privateUrlToken = UUID.randomUUID().toString();
+ RoleAssignment roleAssignment = ctxt.engine().submit(new AssignRoleCommand(privateUrlUser, memberRole, dataset, getRequest(), privateUrlToken));
+ PrivateUrl privateUrl = new PrivateUrl(roleAssignment, dataset, ctxt.systemConfig().getDataverseSiteUrl());
+ return privateUrl;
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
index afbb60efc56..ff28021146d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
@@ -10,6 +10,7 @@
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
/**
* Create a new role in a dataverse.
@@ -33,11 +34,11 @@ public DataverseRole execute(CommandContext ctxt) throws CommandException {
User user = getUser();
//todo: temporary for 4.0 - only superusers can create and edit roles
if ((!(user instanceof AuthenticatedUser) || !user.isSuperuser())) {
- throw new CommandException("Roles can only be created or edited by superusers.",this);
+ throw new IllegalCommandException("Roles can only be created or edited by superusers.",this);
}
dv.addRole(created);
return ctxt.roles().save(created);
}
-
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
index 625b82319a1..402cdc0b4c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
@@ -16,8 +16,11 @@
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.export.ExportException;
+import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.util.ResourceBundle;
+import java.util.logging.Level;
/**
*
@@ -67,6 +70,26 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
boolean doNormalSolrDocCleanUp = true;
ctxt.index().indexDataset(managed.getDataset(), doNormalSolrDocCleanUp);
+ // if there is still another released version of this dataset,
+ // we want to re-export it :
+
+ ExportService instance = ExportService.getInstance();
+
+ if (managed.getDataset().getReleasedVersion() != null) {
+ try {
+ instance.exportAllFormats(managed.getDataset());
+ } catch (ExportException ex) {
+ // Something went wrong!
+ // But we're not going to treat it as a fatal condition.
+ }
+ } else {
+ // otherwise, we need to wipe clean the exports we may have cached:
+ instance.clearAllCachedFormats(managed.getDataset());
+ }
+ // And save the dataset, to get the "last exported" timestamp right:
+
+ Dataset managedDs = ctxt.em().merge(managed.getDataset());
+
return managed;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java
index c2bbbefac86..5ff5b71b836 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java
@@ -3,7 +3,9 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.RoleAssignment;
import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -11,7 +13,10 @@
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import java.util.Iterator;
+import java.util.List;
+import java.util.logging.Logger;
/**
*
@@ -20,6 +25,8 @@
@RequiredPermissions(Permission.DeleteDatasetDraft)
public class DeleteDatasetVersionCommand extends AbstractVoidCommand {
+ private static final Logger logger = Logger.getLogger(DeleteDatasetVersionCommand.class.getCanonicalName());
+
private final Dataset doomed;
public DeleteDatasetVersionCommand(DataverseRequest aRequest, Dataset dataset) {
@@ -64,6 +71,26 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
dvIt.remove();
}
}
+ /**
+ * DeleteDatasetDraft, the permission required by this command
+ * (DeleteDatasetVersionCommand), is not sufficient for running
+ * GetPrivateUrlCommand or DeletePrivateUrlCommand, both of which
+ * require ManageDatasetPermissions (DeletePrivateUrlCommand calls
+ * RevokeRoleCommand, which requires ManageDatasetPermissions when
+ * executed on a dataset). So we call the service beans directly,
+ * allowing a lowly Contributor who does NOT have
+ * ManageDatasetPermissions to still successfully delete a Private URL.
+ */
+ PrivateUrl privateUrl = ctxt.privateUrl().getPrivateUrlFromDatasetId(doomed.getId());
+ if (privateUrl != null) {
+ logger.fine("Deleting Private URL for dataset id " + doomed.getId());
+ PrivateUrlUser privateUrlUser = new PrivateUrlUser(doomed.getId());
+ List<RoleAssignment> roleAssignments = ctxt.roles().directRoleAssignments(privateUrlUser, doomed);
+ for (RoleAssignment roleAssignment : roleAssignments) {
+ ctxt.roles().revoke(roleAssignment);
+ }
+ }
boolean doNormalSolrDocCleanUp = true;
ctxt.index().indexDataset(doomed, doNormalSolrDocCleanUp);
return;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteHarvestingClientCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteHarvestingClientCommand.java
new file mode 100644
index 00000000000..cff2e2e5540
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteHarvestingClientCommand.java
@@ -0,0 +1,57 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@RequiredPermissions( Permission.EditDataverse )
+public class DeleteHarvestingClientCommand extends AbstractVoidCommand {
+
+ private final Dataverse motherDataverse;
+ private final HarvestingClient harvestingClient;
+
+ public DeleteHarvestingClientCommand(DataverseRequest aRequest, HarvestingClient harvestingClient) {
+ super(aRequest, harvestingClient.getDataverse());
+ this.motherDataverse = harvestingClient.getDataverse();
+ this.harvestingClient = harvestingClient;
+ }
+
+ @Override
+ public void executeImpl(CommandContext ctxt) throws CommandException {
+
+ if (harvestingClient == null) {
+ throw new IllegalCommandException("DeleteHarvestingClientCommand: attempted to execute with null harvesting client; dataverse: "+motherDataverse.getAlias(), this);
+ }
+
+ HarvestingClient merged = ctxt.em().merge(harvestingClient);
+
+ // Purge all the SOLR documents associated with this client from the
+ // index server:
+ // ctxt.index().deleteHarvestedDocuments(merged);
+
+ // All the datasets harvested by this client will be cleanly deleted
+ // through the defined cascade. Cascaded delete does not work for harvested
+ // files, however. So they need to be removed explicitly, before we
+ // proceed with removing the client itself.
+
+ for (DataFile harvestedFile : ctxt.files().findHarvestedFilesByClient(merged)) {
+ DataFile mergedFile = ctxt.em().merge(harvestedFile);
+ ctxt.em().remove(mergedFile);
+ harvestedFile = null;
+ }
+
+ ctxt.em().remove(merged);
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommand.java
new file mode 100644
index 00000000000..34429828a8e
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommand.java
@@ -0,0 +1,46 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.RoleAssignment;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import java.util.List;
+import java.util.logging.Logger;
+
+@RequiredPermissions(Permission.ManageDatasetPermissions)
+public class DeletePrivateUrlCommand extends AbstractVoidCommand {
+
+ private static final Logger logger = Logger.getLogger(DeletePrivateUrlCommand.class.getCanonicalName());
+
+ final Dataset dataset;
+
+ public DeletePrivateUrlCommand(DataverseRequest aRequest, Dataset theDataset) {
+ super(aRequest, theDataset);
+ dataset = theDataset;
+ }
+
+ @Override
+ protected void executeImpl(CommandContext ctxt) throws CommandException {
+ logger.fine("Executing DeletePrivateUrlCommand....");
+ if (dataset == null) {
+ /**
+ * @todo Internationalize this.
+ */
+ String message = "Can't delete Private URL. Dataset is null.";
+ logger.info(message);
+ throw new IllegalCommandException(message, this);
+ }
+ PrivateUrlUser privateUrlUser = new PrivateUrlUser(dataset.getId());
+ List<RoleAssignment> roleAssignments = ctxt.roles().directRoleAssignments(privateUrlUser, dataset);
+ for (RoleAssignment roleAssignment : roleAssignments) {
+ ctxt.engine().submit(new RevokeRoleCommand(roleAssignment, getRequest()));
+ }
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetHarvestingClientCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetHarvestingClientCommand.java
new file mode 100644
index 00000000000..adb35a0ea47
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetHarvestingClientCommand.java
@@ -0,0 +1,53 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+// One can view the configuration of a Harvesting Client if and only if
+// they have the permission to view the dataverse that owns the harvesting
+// client. And for a Dataverse, we cannot define the permission with a
+// @RequiredPermission annotation - because the decision has to be made dynamically:
+// Everybody can view a published Dataverse; otherwise, an explicit
+// ViewUnpublishedDataverse is needed.
+// This is defined in the getRequiredPermissions() method, below.
+public class GetHarvestingClientCommand extends AbstractCommand<HarvestingClient> {
+ private final Dataverse ownerDataverse;
+ private final HarvestingClient harvestingClient;
+
+ public GetHarvestingClientCommand(DataverseRequest aRequest, HarvestingClient harvestingClient) {
+ super(aRequest, harvestingClient.getDataverse());
+ this.ownerDataverse = harvestingClient.getDataverse();
+ this.harvestingClient = harvestingClient;
+ }
+
+ @Override
+ public HarvestingClient execute(CommandContext ctxt) throws CommandException {
+ if (ownerDataverse == null) {
+ throw new IllegalCommandException("GetHarvestingClientCommand called on a null Dataverse object", this);
+ }
+ if (harvestingClient == null) {
+ throw new IllegalCommandException("GetHarvestigClientCommand called on a null HarvestingClient object", this);
+ }
+ return harvestingClient;
+ }
+
+ @Override
+ public Map<String, Set<Permission>> getRequiredPermissions() {
+ return Collections.singletonMap("",
+ ownerDataverse.isReleased() ? Collections.<Permission>emptySet()
+ : Collections.singleton(Permission.ViewUnpublishedDataverse));
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommand.java
new file mode 100644
index 00000000000..5e698dcf1b9
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommand.java
@@ -0,0 +1,36 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import java.util.logging.Logger;
+
+@RequiredPermissions(Permission.ManageDatasetPermissions)
+public class GetPrivateUrlCommand extends AbstractCommand<PrivateUrl> {
+
+ private static final Logger logger = Logger.getLogger(GetPrivateUrlCommand.class.getCanonicalName());
+
+ private final Dataset dataset;
+
+ public GetPrivateUrlCommand(DataverseRequest aRequest, Dataset theDataset) {
+ super(aRequest, theDataset);
+ dataset = theDataset;
+ }
+
+ @Override
+ public PrivateUrl execute(CommandContext ctxt) throws CommandException {
+ logger.fine("GetPrivateUrlCommand called");
+ Long datasetId = dataset.getId();
+ if (datasetId == null) {
+ // Perhaps a dataset is being created in the GUI.
+ return null;
+ }
+ return ctxt.privateUrl().getPrivateUrlFromDatasetId(datasetId);
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java
index b5493b1e024..ed438bc3815 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRoleAssignments.java
@@ -1,6 +1,6 @@
package edu.harvard.iq.dataverse.engine.command.impl;
-import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DvObject;
import edu.harvard.iq.dataverse.RoleAssignment;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -14,12 +14,11 @@
*
* @author michael
*/
-//@todo should this command exist for other dvObjects
@RequiredPermissions( Permission.ManageDataversePermissions )
public class ListRoleAssignments extends AbstractCommand<List<RoleAssignment>> {
- private final Dataverse definitionPoint;
- public ListRoleAssignments(DataverseRequest aRequest, Dataverse aDefinitionPoint) {
+ private final DvObject definitionPoint;
+ public ListRoleAssignments(DataverseRequest aRequest, DvObject aDefinitionPoint) {
super(aRequest, aDefinitionPoint);
definitionPoint = aDefinitionPoint;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java
index b7644a96432..16cd6d0be52 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java
@@ -1,9 +1,12 @@
package edu.harvard.iq.dataverse.engine.command.impl;
import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissionsMap;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
@@ -15,12 +18,12 @@
//@todo We will need to revist the permissions for move, once we add this
//(will probably need different move commands for unplublished which checks add,
//versus published which checks publish
-/*
+
@RequiredPermissionsMap({
- @RequiredPermissions( dataverseName = "moved", value = {Permission.UndoableEdit, Permission.AssignRole} ),
- @RequiredPermissions( dataverseName = "source", value = Permission.UndoableEdit ),
- @RequiredPermissions( dataverseName = "destination", value = Permission.DestructiveEdit )
-})*/
+ @RequiredPermissions( dataverseName = "moved", value = {Permission.ManageDataversePermissions, Permission.EditDataverse} ),
+ @RequiredPermissions( dataverseName = "source", value = Permission.DeleteDataverse ),
+ @RequiredPermissions( dataverseName = "destination", value = Permission.AddDataverse )
+})
public class MoveDataverseCommand extends AbstractVoidCommand {
final Dataverse moved;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
index 36792f20054..43d24095396 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -22,13 +22,25 @@
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.export.ExportException;
+import edu.harvard.iq.dataverse.export.ExportService;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.search.IndexResponse;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonAsDatasetDto;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import java.util.ResourceBundle;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.json.JsonObjectBuilder;
/**
*
@@ -36,6 +48,7 @@
*/
@RequiredPermissions(Permission.PublishDataset)
public class PublishDatasetCommand extends AbstractCommand<Dataset> {
+ private static final Logger logger = Logger.getLogger(PublishDatasetCommand.class.getCanonicalName());
boolean minorRelease = false;
Dataset theDataset;
@@ -174,6 +187,26 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
}
theDataset.setFileAccessRequest(theDataset.getLatestVersion().getTermsOfUseAndAccess().isFileAccessRequest());
+
+
+ /*
+ Attempting to run metadata export, for all the formats for which
+ we have metadata Exporters:
+ */
+
+ try {
+ ExportService instance = ExportService.getInstance();
+ instance.exportAllFormats(theDataset);
+
+ } catch (ExportException ex) {
+ // Something went wrong!
+ // Just like with indexing, a failure to export is not a fatal
+ // condition. We'll just log the error as a warning and keep
+ // going:
+ logger.log(Level.WARNING, "Exception while exporting: " + ex.getMessage());
+ }
+
+
Dataset savedDataset = ctxt.em().merge(theDataset);
// set the subject of the parent (all the way up) Dataverses
@@ -228,6 +261,12 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
}
}
+ PrivateUrl privateUrl = ctxt.engine().submit(new GetPrivateUrlCommand(getRequest(), savedDataset));
+ if (privateUrl != null) {
+ logger.fine("Deleting Private URL for dataset id " + savedDataset.getId());
+ ctxt.engine().submit(new DeletePrivateUrlCommand(getRequest(), savedDataset));
+ }
+
/*
MoveIndexing to after DOI update so that if command exception is thrown the re-index will not
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java
index e9670356c21..51e595b7533 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java
@@ -137,6 +137,12 @@ public Dataset save(CommandContext ctxt) throws CommandException {
}
// Remove / delete any files that were removed
+
+ // If any of the files that we are deleting has a UNF, we will need to
+ // re-calculate the UNF of the version - since that is the product
+ // of the UNFs of the individual files.
+ boolean recalculateUNF = false;
+
for (FileMetadata fmd : filesToDelete) {
// check if this file is being used as the default thumbnail
if (fmd.getDataFile().equals(theDataset.getThumbnailFile())) {
@@ -144,6 +150,10 @@ public Dataset save(CommandContext ctxt) throws CommandException {
theDataset.setThumbnailFile(null);
}
+ if (fmd.getDataFile().getUnf() != null) {
+ recalculateUNF = true;
+ }
+
if (!fmd.getDataFile().isReleased()) {
// if file is draft (ie. new to this version, delete; otherwise just remove filemetadata object)
ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest()));
@@ -163,6 +173,10 @@ public Dataset save(CommandContext ctxt) throws CommandException {
}
}
+ if (recalculateUNF) {
+ ctxt.ingest().recalculateDatasetVersionUNF(theDataset.getEditVersion());
+ }
+
String nonNullDefaultIfKeyNotFound = "";
String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateHarvestingClientCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateHarvestingClientCommand.java
new file mode 100644
index 00000000000..d8504da6296
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateHarvestingClientCommand.java
@@ -0,0 +1,36 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@RequiredPermissions( Permission.EditDataverse )
+public class UpdateHarvestingClientCommand extends AbstractCommand<HarvestingClient> {
+
+ private final Dataverse dv;
+ private final HarvestingClient harvestingClient;
+
+ public UpdateHarvestingClientCommand(DataverseRequest aRequest, HarvestingClient harvestingClient) {
+ super(aRequest, harvestingClient.getDataverse());
+ this.harvestingClient = harvestingClient;
+ this.dv = harvestingClient.getDataverse();
+ }
+
+ @Override
+ public HarvestingClient execute(CommandContext ctxt) throws CommandException {
+ // TODO: check that the harvesting client config is attached to a legit
+ // dataverse; and that we are in fact modifying a config that already
+ // exists. -- L.A. 4.4
+ return ctxt.em().merge(this.harvestingClient);
+ }
+
+}
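Like the other write commands in this patch, UpdateHarvestingClientCommand is meant to be submitted through the command engine by a service or API bean. A minimal sketch of such a caller follows; the commandEngine and dataverseRequest names are assumptions, and only the constructor signature comes from the class above:

    // Hypothetical caller (injected engine and request objects assumed):
    HarvestingClient updated = commandEngine.submit(
            new UpdateHarvestingClientCommand(dataverseRequest, harvestingClient));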
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
new file mode 100644
index 00000000000..7c5fea0f1ec
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
@@ -0,0 +1,75 @@
+
+package edu.harvard.iq.dataverse.export;
+
+import com.google.auto.service.AutoService;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.export.dublincore.DublinCoreExportUtil;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.io.OutputStream;
+import javax.json.JsonObject;
+import javax.xml.stream.XMLStreamException;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+@AutoService(Exporter.class)
+public class DCTermsExporter implements Exporter {
+
+
+
+ @Override
+ public String getProviderName() {
+ return "dcterms";
+ }
+
+ @Override
+ public String getDisplayName() {
+ return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") : "Dublin Core (DCTERMS)";
+ }
+
+ @Override
+ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+ try {
+ DublinCoreExportUtil.datasetJson2dublincore(json, outputStream, DublinCoreExportUtil.DC_FLAVOR_DCTERMS);
+ } catch (XMLStreamException xse) {
+ throw new ExportException("Caught XMLStreamException performing DCTERMS export");
+ }
+ }
+
+ @Override
+ public Boolean isXMLFormat() {
+ return true;
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ return false;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return true;
+ }
+
+ @Override
+ public String getXMLNameSpace() throws ExportException {
+ return DublinCoreExportUtil.DCTERMS_XML_NAMESPACE;
+ }
+
+ @Override
+ public String getXMLSchemaLocation() throws ExportException {
+ return DublinCoreExportUtil.DCTERMS_XML_SCHEMALOCATION;
+ }
+
+ @Override
+ public String getXMLSchemaVersion() throws ExportException {
+ return DublinCoreExportUtil.DEFAULT_XML_VERSION;
+ }
+
+ @Override
+ public void setParam(String name, Object value) {
+ // this exporter doesn't need/doesn't currently take any parameters
+ }
+}
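Each exporter in this patch is annotated with @AutoService(Exporter.class); at build time the annotation processor registers the implementing class in META-INF/services/edu.harvard.iq.dataverse.export.spi.Exporter, which is what lets ExportService (further below) discover exporters through ServiceLoader. A minimal sketch of that discovery, assuming nothing beyond the Exporter interface used throughout this patch:

    // Sketch: list every exporter registered on the classpath via @AutoService.
    ServiceLoader<Exporter> loader = ServiceLoader.load(Exporter.class);
    for (Exporter exporter : loader) {
        System.out.println(exporter.getProviderName() + " -> " + exporter.getDisplayName());
    }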
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
new file mode 100644
index 00000000000..6c7a540d58c
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
@@ -0,0 +1,90 @@
+
+package edu.harvard.iq.dataverse.export;
+
+import com.google.auto.service.AutoService;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.io.OutputStream;
+import javax.ejb.EJB;
+import javax.json.JsonObject;
+import javax.xml.stream.XMLStreamException;
+
+/**
+ *
+ * @author Leonid Andreev
+ * (based on the original DDIExporter by
+ * @author skraffmi
+ * - renamed OAI_DDIExporter)
+ */
+@AutoService(Exporter.class)
+public class DDIExporter implements Exporter {
+ // TODO:
+ // move these into the ddi export utility
+ private static String DEFAULT_XML_NAMESPACE = "ddi:codebook:2_5";
+ private static String DEFAULT_XML_SCHEMALOCATION = "http://www.ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd";
+ private static String DEFAULT_XML_VERSION = "2.5";
+
+ // This exporter is for the "full" DDI, the flavor that includes the
+ // file-level and variable metadata sections.
+ @Override
+ public String getProviderName() {
+ return "ddi";
+ }
+
+ @Override
+ public String getDisplayName() {
+ return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") : "DDI";
+ }
+
+ @Override
+ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+ try {
+ DdiExportUtil.datasetJson2ddi(json, version, outputStream);
+ } catch (XMLStreamException xse) {
+ throw new ExportException ("Caught XMLStreamException performing DDI export");
+ }
+ }
+
+ @Override
+ public Boolean isXMLFormat() {
+ return true;
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ // No, we don't want this format to be harvested!
+ // For datasets with tabular data the file-level portions of the DDIs
+ // become huge and expensive to parse, even though they don't contain any
+ // metadata useful to remote harvesters. -- L.A. 4.5
+ return false;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return true;
+ }
+
+ @Override
+ public String getXMLNameSpace() throws ExportException {
+ return this.DEFAULT_XML_NAMESPACE;
+ }
+
+ @Override
+ public String getXMLSchemaLocation() throws ExportException {
+ return this.DEFAULT_XML_SCHEMALOCATION;
+ }
+
+ @Override
+ public String getXMLSchemaVersion() throws ExportException {
+ return this.DEFAULT_XML_VERSION;
+ }
+
+ @Override
+ public void setParam(String name, Object value) {
+ // this exporter does not use or support any parameters as of now.
+ }
+}
+
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
new file mode 100644
index 00000000000..7c4ebfdd44d
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
@@ -0,0 +1,75 @@
+
+package edu.harvard.iq.dataverse.export;
+
+import com.google.auto.service.AutoService;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.export.dublincore.DublinCoreExportUtil;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.io.OutputStream;
+import javax.json.JsonObject;
+import javax.xml.stream.XMLStreamException;
+
+/**
+ *
+ * @author skraffmi
+ */
+@AutoService(Exporter.class)
+public class DublinCoreExporter implements Exporter {
+
+
+
+ @Override
+ public String getProviderName() {
+ return "oai_dc";
+ }
+
+ @Override
+ public String getDisplayName() {
+ return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") : "Dublin Core";
+ }
+
+ @Override
+ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+ try {
+ DublinCoreExportUtil.datasetJson2dublincore(json, outputStream, DublinCoreExportUtil.DC_FLAVOR_OAI);
+ } catch (XMLStreamException xse) {
+ throw new ExportException("Caught XMLStreamException performing DC export");
+ }
+ }
+
+ @Override
+ public Boolean isXMLFormat() {
+ return true;
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ return true;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return false;
+ }
+
+ @Override
+ public String getXMLNameSpace() throws ExportException {
+ return DublinCoreExportUtil.OAI_DC_XML_NAMESPACE;
+ }
+
+ @Override
+ public String getXMLSchemaLocation() throws ExportException {
+ return DublinCoreExportUtil.OAI_DC_XML_SCHEMALOCATION;
+ }
+
+ @Override
+ public String getXMLSchemaVersion() throws ExportException {
+ return DublinCoreExportUtil.DEFAULT_XML_VERSION;
+ }
+
+ @Override
+ public void setParam(String name, Object value) {
+ // this exporter doesn't need/doesn't currently take any parameters
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportException.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportException.java
new file mode 100644
index 00000000000..0909bf06ca4
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportException.java
@@ -0,0 +1,20 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.export;
+
+/**
+ *
+ * @author Leonid Andreev
+ */
+public class ExportException extends Exception {
+ public ExportException(String message) {
+ super(message);
+ }
+
+ public ExportException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
new file mode 100644
index 00000000000..4a8b49c25e9
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
@@ -0,0 +1,295 @@
+
+package edu.harvard.iq.dataverse.export;
+
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonAsDatasetDto;
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ServiceConfigurationError;
+import java.util.ServiceLoader;
+import javax.ejb.EJB;
+import javax.ejb.TransactionAttribute;
+import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+
+/**
+ *
+ * @author skraffmi
+ */
+public class ExportService {
+
+ private static ExportService service;
+ private ServiceLoader<Exporter> loader;
+
+ private ExportService() {
+ loader = ServiceLoader.load(Exporter.class);
+ }
+
+ public static synchronized ExportService getInstance() {
+ if (service == null) {
+ service = new ExportService();
+ } else{
+ service.loader.reload();
+ }
+ return service;
+ }
+
+ public List<String[]> getExportersLabels() {
+ List<String[]> retList = new ArrayList<>();
+ Iterator<Exporter> exporters = ExportService.getInstance().loader.iterator();
+ while (exporters.hasNext()) {
+ Exporter e = exporters.next();
+ String[] temp = new String[2];
+ temp[0] = e.getDisplayName();
+ temp[1] = e.getProviderName();
+ retList.add(temp);
+ }
+ return retList;
+ }
+
+ public InputStream getExport(Dataset dataset, String formatName) throws ExportException {
+ // first we will try to locate an already existing, cached export
+ // for this format:
+ InputStream exportInputStream = getCachedExportFormat(dataset, formatName);
+
+ if (exportInputStream != null) {
+ return exportInputStream;
+ }
+
+ // if it doesn't exist, we'll try to run the export:
+
+ exportFormat(dataset, formatName);
+
+ // and then try again:
+
+ exportInputStream = getCachedExportFormat(dataset, formatName);
+
+ if (exportInputStream != null) {
+ return exportInputStream;
+ }
+
+ // if there is no cached export still - we have to give up and throw
+ // an exception!
+
+ throw new ExportException("Failed to export the dataset as "+formatName);
+
+ }
+
+ public String getExportAsString(Dataset dataset, String formatName) {
+ try {
+ InputStream inputStream = getExport(dataset, formatName);
+ if (inputStream != null) {
+ BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, "UTF8"));
+ StringBuilder sb = new StringBuilder();
+ String line;
+ while ((line = br.readLine()) != null) {
+ sb.append(line);
+ sb.append('\n');
+ }
+ br.close();
+ return sb.toString();
+ }
+ } catch (Exception ex) {
+ //ex.printStackTrace();
+ return null;
+ }
+ return null;
+
+ }
+
+
+ // This method goes through all the available Exporters and, on each one,
+ // calls the "cacheExport()" method that saves the produced output
+ // in a file in the dataset directory.
+
+ public void exportAllFormats (Dataset dataset) throws ExportException {
+ clearAllCachedFormats(dataset);
+
+ try {
+ DatasetVersion releasedVersion = dataset.getReleasedVersion();
+ if (releasedVersion == null) {
+ throw new ExportException("No released version for dataset "+dataset.getGlobalId());
+ }
+ final JsonObjectBuilder datasetAsJsonBuilder = jsonAsDatasetDto(releasedVersion);
+ JsonObject datasetAsJson = datasetAsJsonBuilder.build();
+
+ Iterator<Exporter> exporters = loader.iterator();
+ while ( exporters.hasNext()) {
+ Exporter e = exporters.next();
+ String formatName = e.getProviderName();
+
+ cacheExport(releasedVersion, formatName, datasetAsJson, e);
+
+ }
+ } catch (ServiceConfigurationError serviceError) {
+ throw new ExportException("Service configuration error during export. "+serviceError.getMessage());
+ }
+ // Finally, if we have been able to successfully export in all available
+ // formats, we'll increment the "last exported" time stamp:
+
+ dataset.setLastExportTime(new Timestamp(new Date().getTime()));
+
+ }
+
+ public void clearAllCachedFormats(Dataset dataset) {
+ Iterator<Exporter> exporters = loader.iterator();
+ while (exporters.hasNext()) {
+ Exporter e = exporters.next();
+ String formatName = e.getProviderName();
+
+ clearCachedExport(dataset, formatName);
+ }
+
+ dataset.setLastExportTime(null);
+ }
+
+ // This method finds the exporter for the format requested,
+ // then produces the dataset metadata as a JsonObject, then calls
+ // the "chacheExport()" method that will save the produced output
+ // in a file in the dataset directory.
+
+ public void exportFormat(Dataset dataset, String formatName) throws ExportException {
+ try {
+ Iterator<Exporter> exporters = loader.iterator();
+ while (exporters.hasNext()) {
+ Exporter e = exporters.next();
+ if (e.getProviderName().equals(formatName)) {
+ DatasetVersion releasedVersion = dataset.getReleasedVersion();
+ if (releasedVersion == null) {
+ throw new IllegalStateException("No Released Version");
+ }
+ final JsonObjectBuilder datasetAsJsonBuilder = jsonAsDatasetDto(releasedVersion);
+ cacheExport(releasedVersion, formatName, datasetAsJsonBuilder.build(), e);
+ }
+ }
+ } catch (ServiceConfigurationError serviceError) {
+ throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
+ } catch (IllegalStateException e) {
+ throw new ExportException("No published version found during export. " + dataset.getGlobalId());
+ }
+ }
+
+ public Exporter getExporter(String formatName) throws ExportException {
+ try {
+ Iterator<Exporter> exporters = loader.iterator();
+ while (exporters.hasNext()) {
+ Exporter e = exporters.next();
+ if (e.getProviderName().equals(formatName)) {
+ return e;
+ }
+ }
+ } catch (ServiceConfigurationError serviceError) {
+ throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
+ } catch (Exception ex) {
+ throw new ExportException("Could not find Exporter \""+formatName+"\", unknown exception");
+ }
+ throw new ExportException("No such Exporter: "+formatName);
+ }
+
+ // This method runs the selected metadata exporter, caching the output
+ // in a file in the dataset directory:
+ private void cacheExport(DatasetVersion version, String format, JsonObject datasetAsJson, Exporter exporter) throws ExportException {
+ try {
+ if (version.getDataset().getFileSystemDirectory() != null && !Files.exists(version.getDataset().getFileSystemDirectory())) {
+ /* Note that "createDirectories()" must be used - not
+ * "createDirectory()", to make sure all the parent
+ * directories that may not yet exist are created as well.
+ */
+
+ Files.createDirectories(version.getDataset().getFileSystemDirectory());
+ }
+
+ Path cachedMetadataFilePath = Paths.get(version.getDataset().getFileSystemDirectory().toString(), "export_" + format + ".cached");
+ FileOutputStream cachedExportOutputStream = new FileOutputStream(cachedMetadataFilePath.toFile());
+ exporter.exportDataset(version, datasetAsJson, cachedExportOutputStream);
+ cachedExportOutputStream.flush();
+ cachedExportOutputStream.close();
+
+ } catch (IOException ioex) {
+ throw new ExportException("IO Exception thrown exporting as " + format);
+ }
+
+ }
+
+ private void clearCachedExport(Dataset dataset, String format) {
+ if (dataset != null && dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) {
+
+ Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + format + ".cached");
+ try {
+ Files.delete(cachedMetadataFilePath);
+ } catch (IOException ioex) {
+ }
+ }
+ }
+
+ // This method checks if the metadata has already been exported in this
+ // format and cached on disk. If it has, it'll open the file and return
+ // the file input stream. If not, it'll return null.
+
+ private InputStream getCachedExportFormat(Dataset dataset, String formatName) {
+
+ try {
+ if (dataset.getFileSystemDirectory() != null) {
+ Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + formatName + ".cached");
+ if (Files.exists(cachedMetadataFilePath)) {
+ FileInputStream cachedExportInputStream = new FileInputStream(cachedMetadataFilePath.toFile());
+ return cachedExportInputStream;
+ }
+ }
+ } catch (IOException ioex) {
+ // don't do anything - we'll just return null
+ }
+
+ return null;
+
+ }
+
+ public Long getCachedExportSize(Dataset dataset, String formatName) {
+ try {
+ if (dataset.getFileSystemDirectory() != null) {
+ Path cachedMetadataFilePath = Paths.get(dataset.getFileSystemDirectory().toString(), "export_" + formatName + ".cached");
+ if (Files.exists(cachedMetadataFilePath)) {
+ return cachedMetadataFilePath.toFile().length();
+ }
+ }
+ } catch (Exception ioex) {
+ // don't do anything - we'll just return null
+ }
+
+ return null;
+ }
+
+
+ public Boolean isXMLFormat(String provider){
+ try {
+ Iterator<Exporter> exporters = loader.iterator();
+ while (exporters.hasNext()) {
+ Exporter e = exporters.next();
+ if (e.getProviderName().equals(provider)) {
+ return e.isXMLFormat();
+ }
+ }
+ } catch (ServiceConfigurationError serviceError) {
+ serviceError.printStackTrace();
+ }
+ return null;
+ }
+
+}
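For orientation, a minimal sketch of how calling code could use the cache-or-generate behavior described in the comments above; the dataset reference and the stream handling are assumptions, and only the ExportService methods and provider names come from this patch:

    ExportService exportService = ExportService.getInstance();
    try {
        // Returns the cached export_ddi.cached file if present; otherwise runs the
        // exporter, caches the result, and returns the new stream.
        InputStream ddiStream = exportService.getExport(dataset, "ddi");
        // ... copy the stream to an HTTP response, a file, etc. ...
    } catch (ExportException ex) {
        // Thrown when there is no released version or the export could not be produced.
    }
    // Convenience wrapper that swallows errors and returns null on failure:
    String dublinCore = exportService.getExportAsString(dataset, "oai_dc");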
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
new file mode 100644
index 00000000000..03d04ca8116
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
@@ -0,0 +1,76 @@
+
+package edu.harvard.iq.dataverse.export;
+
+import com.google.auto.service.AutoService;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import javax.json.JsonObject;
+
+
+/**
+ *
+ * @author skraffmi
+ */
+@AutoService(Exporter.class)
+public class JSONExporter implements Exporter {
+
+ @Override
+ public String getProviderName() {
+ return "dataverse_json";
+ }
+
+ @Override
+ public String getDisplayName() {
+ return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.json") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.json") : "JSON";
+ }
+
+ @Override
+ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+ try{
+ outputStream.write(json.toString().getBytes("UTF8"));
+ outputStream.flush();
+ } catch (Exception e){
+ throw new ExportException("Unknown exception caught during JSON export.");
+ }
+ }
+
+ @Override
+ public Boolean isXMLFormat() {
+ return false;
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ return true;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return true;
+ }
+
+ @Override
+ public String getXMLNameSpace() throws ExportException {
+ throw new ExportException ("JSONExporter: not an XML format.");
+ }
+
+ @Override
+ public String getXMLSchemaLocation() throws ExportException {
+ throw new ExportException ("JSONExporter: not an XML format.");
+ }
+
+ @Override
+ public String getXMLSchemaVersion() throws ExportException {
+ throw new ExportException ("JSONExporter: not an XML format.");
+ }
+
+ @Override
+ public void setParam(String name, Object value) {
+ // this exporter doesn't need/doesn't currently take any parameters
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
new file mode 100644
index 00000000000..eafe3daf726
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
@@ -0,0 +1,82 @@
+
+package edu.harvard.iq.dataverse.export;
+
+import com.google.auto.service.AutoService;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
+import edu.harvard.iq.dataverse.export.spi.Exporter;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.io.OutputStream;
+import javax.ejb.EJB;
+import javax.json.JsonObject;
+import javax.xml.stream.XMLStreamException;
+
+/**
+ *
+ * @author skraffmi
+ */
+@AutoService(Exporter.class)
+public class OAI_DDIExporter implements Exporter {
+ // TODO: move these to the export utility:
+ private static String DEFAULT_XML_NAMESPACE = "ddi:codebook:2_5";
+ private static String DEFAULT_XML_SCHEMALOCATION = "http://www.ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd";
+ private static String DEFAULT_XML_VERSION = "2.5";
+
+ @Override
+ public String getProviderName() {
+ return "oai_ddi";
+ }
+
+ @Override
+ public String getDisplayName() {
+ return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") : "DDI";
+ }
+
+ @Override
+ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+ try {
+ // This exporter is for the OAI ("short") flavor of the DDI -
+ // that is, without the variable/data information. The ddi export
+ // utility does not need the version entity to produce that.
+ DdiExportUtil.datasetJson2ddi(json, outputStream);
+ } catch (XMLStreamException xse) {
+ throw new ExportException ("Caught XMLStreamException performing DDI export");
+ }
+ }
+
+ @Override
+ public Boolean isXMLFormat() {
+ return true;
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ return true;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return false;
+ }
+
+ @Override
+ public String getXMLNameSpace() throws ExportException {
+ return OAI_DDIExporter.DEFAULT_XML_NAMESPACE;
+ }
+
+ @Override
+ public String getXMLSchemaLocation() throws ExportException {
+ return OAI_DDIExporter.DEFAULT_XML_SCHEMALOCATION;
+ }
+
+ @Override
+ public String getXMLSchemaVersion() throws ExportException {
+ return OAI_DDIExporter.DEFAULT_XML_VERSION;
+ }
+
+ @Override
+ public void setParam(String name, Object value) {
+ // this exporter does not use or support any parameters as of now.
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index 1e71ea35d31..4dee2a27f66 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -1,25 +1,49 @@
package edu.harvard.iq.dataverse.export.ddi;
import com.google.gson.Gson;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataTable;
+import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetFieldConstant;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.api.dto.DataVariableDTO;
import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
import edu.harvard.iq.dataverse.api.dto.FieldDTO;
import edu.harvard.iq.dataverse.api.dto.FileDTO;
+import edu.harvard.iq.dataverse.api.dto.FileMetadataDTO;
import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
+import edu.harvard.iq.dataverse.datavariable.DataVariable;
+import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
+import edu.harvard.iq.dataverse.datavariable.VariableCategory;
+import edu.harvard.iq.dataverse.datavariable.VariableRange;
+import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE;
+import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG;
+import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_UNF;
+import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG;
+import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF;
+import static edu.harvard.iq.dataverse.util.SystemConfig.FQDN;
+import static edu.harvard.iq.dataverse.util.SystemConfig.SITE_URL;
import edu.harvard.iq.dataverse.util.json.JsonUtil;
import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.io.OutputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import java.nio.file.Files;
import java.nio.file.Paths;
+import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
+import javax.json.JsonObject;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
@@ -27,6 +51,9 @@
public class DdiExportUtil {
private static final Logger logger = Logger.getLogger(DdiExportUtil.class.getCanonicalName());
+
+ public static final String NOTE_TYPE_CONTENTTYPE = "DATAVERSE:CONTENTTYPE";
+ public static final String NOTE_SUBJECT_CONTENTTYPE = "Content/MIME Type";
public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) {
logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson));
@@ -39,21 +66,57 @@ public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) {
return null;
}
}
-
+
+ // "short" ddi, without the "" and "/" sections:
+ public static void datasetJson2ddi(JsonObject datasetDtoAsJson, OutputStream outputStream) throws XMLStreamException {
+ logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson.toString()));
+ Gson gson = new Gson();
+ DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson.toString(), DatasetDTO.class);
+ dtoddi(datasetDto, outputStream);
+ }
+
private static String dto2ddi(DatasetDTO datasetDto) throws XMLStreamException {
OutputStream outputStream = new ByteArrayOutputStream();
+ dtoddi(datasetDto, outputStream);
+ String xml = outputStream.toString();
+ return XmlPrinter.prettyPrintXml(xml);
+ }
+
+ private static void dtoddi(DatasetDTO datasetDto, OutputStream outputStream) throws XMLStreamException {
XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream);
xmlw.writeStartElement("codeBook");
- xmlw.writeDefaultNamespace("http://www.icpsr.umich.edu/DDI");
- writeAttribute(xmlw, "version", "2.0");
+ xmlw.writeDefaultNamespace("ddi:codebook:2_5");
+ xmlw.writeAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
+ xmlw.writeAttribute("xsi:schemaLocation", "ddi:codebook:2_5 http://www.ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd");
+ writeAttribute(xmlw, "version", "2.5");
createStdyDscr(xmlw, datasetDto);
- createdataDscr(xmlw, datasetDto.getDatasetVersion().getFiles());
+ createOtherMats(xmlw, datasetDto.getDatasetVersion().getFiles());
xmlw.writeEndElement(); // codeBook
xmlw.flush();
- String xml = outputStream.toString();
- return XmlPrinter.prettyPrintXml(xml);
}
+
+ // "full" ddi, with the the "" and "/" sections:
+ public static void datasetJson2ddi(JsonObject datasetDtoAsJson, DatasetVersion version, OutputStream outputStream) throws XMLStreamException {
+ logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson.toString()));
+ Gson gson = new Gson();
+ DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson.toString(), DatasetDTO.class);
+
+ XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream);
+ xmlw.writeStartElement("codeBook");
+ xmlw.writeDefaultNamespace("ddi:codebook:2_5");
+ xmlw.writeAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
+ xmlw.writeAttribute("xsi:schemaLocation", "ddi:codebook:2_5 http://www.ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd");
+ writeAttribute(xmlw, "version", "2.5");
+ createStdyDscr(xmlw, datasetDto);
+ createFileDscr(xmlw, version);
+ createDataDscr(xmlw, version);
+ createOtherMatsFromFileMetadatas(xmlw, version.getFileMetadatas());
+ xmlw.writeEndElement(); // codeBook
+ xmlw.flush();
+ }
+
+
/**
* @todo This is just a stub, copied from DDIExportServiceBean. It should
* produce valid DDI based on
@@ -65,112 +128,1048 @@ private static String dto2ddi(DatasetDTO datasetDto) throws XMLStreamException {
* @todo Rename this from "study" to "dataset".
*/
private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto) throws XMLStreamException {
- String title = dto2title(datasetDto.getDatasetVersion());
- String authors = dto2authors(datasetDto.getDatasetVersion());
- String persistentAgency = datasetDto.getProtocol();
+ DatasetVersionDTO version = datasetDto.getDatasetVersion();
+ String persistentProtocol = datasetDto.getProtocol();
+ String persistentAgency = persistentProtocol;
+ // The "persistentAgency" tag is used for the "agency" attribute of the
+ // <IDNo> ddi section; back in the DVN3 days we used "handle" and "DOI"
+ // for the 2 supported protocols, respectively. For the sake of backward
+ // compatibility, we should probably stick with these labels: (-- L.A. 4.5)
+ if ("hdl".equals(persistentAgency)) {
+ persistentAgency = "handle";
+ } else if ("doi".equals(persistentAgency)) {
+ persistentAgency = "DOI";
+ }
+
String persistentAuthority = datasetDto.getAuthority();
- String persistentId = datasetDto.getIdentifier();
-
- String citation = datasetDto.getDatasetVersion().getCitation();
+ String persistentId = datasetDto.getIdentifier();
+ //docDesc Block
+ writeDocDescElement (xmlw, datasetDto);
+ //stdyDesc Block
xmlw.writeStartElement("stdyDscr");
xmlw.writeStartElement("citation");
-
xmlw.writeStartElement("titlStmt");
-
- xmlw.writeStartElement("titl");
- xmlw.writeCharacters(title);
- xmlw.writeEndElement(); // titl
-
+
+ writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title));
+ writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle));
+ writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle));
+
xmlw.writeStartElement("IDNo");
writeAttribute(xmlw, "agency", persistentAgency);
- xmlw.writeCharacters(persistentAuthority + "/" + persistentId);
+ xmlw.writeCharacters(persistentProtocol + ":" + persistentAuthority + "/" + persistentId);
xmlw.writeEndElement(); // IDNo
+
xmlw.writeEndElement(); // titlStmt
- xmlw.writeStartElement("rspStmt");
+ writeAuthorsElement(xmlw, version);
+ writeProducersElement(xmlw, version);
+
+ xmlw.writeStartElement("distStmt");
+ writeFullElement(xmlw, "distrbtr", datasetDto.getPublisher());
+ writeFullElement(xmlw, "distDate", datasetDto.getPublicationDate());
+ xmlw.writeEndElement(); // diststmt
- xmlw.writeStartElement("AuthEnty");
- xmlw.writeCharacters(authors);
- xmlw.writeEndElement(); // AuthEnty
+ xmlw.writeEndElement(); // citation
+ //End Citation Block
+
+ //Start Study Info Block
+ // Study Info
+ xmlw.writeStartElement("stdyInfo");
+
+ writeSubjectElement(xmlw, version); //Subject and Keywords
+ writeAbstractElement(xmlw, version); // Description
+ writeFullElement(xmlw, "notes", dto2Primitive(version, DatasetFieldConstant.notesText));
+
+ writeSummaryDescriptionElement(xmlw, version);
+ writeRelPublElement(xmlw, version);
- xmlw.writeEndElement(); // rspStmt
+ writeOtherIdElement(xmlw, version);
+ writeDistributorsElement(xmlw, version);
+ writeContactsElement(xmlw, version);
+ writeFullElement(xmlw, "depositr", dto2Primitive(version, DatasetFieldConstant.depositor));
+ writeFullElement(xmlw, "depDate", dto2Primitive(version, DatasetFieldConstant.dateOfDeposit));
+
+ writeFullElementList(xmlw, "relMat", dto2PrimitiveList(version, DatasetFieldConstant.relatedMaterial));
+ writeFullElementList(xmlw, "relStdy", dto2PrimitiveList(version, DatasetFieldConstant.relatedDatasets));
+ writeFullElementList(xmlw, "othRefs", dto2PrimitiveList(version, DatasetFieldConstant.otherReferences));
+ writeSeriesElement(xmlw, version);
+ writeSoftwareElement(xmlw, version);
+ writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources));
+ writeFullElement(xmlw, "srcOrig", dto2Primitive(version, DatasetFieldConstant.originOfSources));
+ writeFullElement(xmlw, "srcChar", dto2Primitive(version, DatasetFieldConstant.characteristicOfSources));
+ writeFullElement(xmlw, "srcDocu", dto2Primitive(version, DatasetFieldConstant.accessToSources));
+ xmlw.writeEndElement(); // stdyInfo
+ // End Info Block
+
+ //Social Science Metadata block
+
+ writeMethodElement(xmlw, version);
+
+ //Terms of Use and Access
+ writeFullElement(xmlw, "useStmt", version.getTermsOfUse());
+ writeFullElement(xmlw, "confDec", version.getConfidentialityDeclaration());
+ writeFullElement(xmlw, "specPerm", version.getSpecialPermissions());
+ writeFullElement(xmlw, "restrctn", version.getRestrictions());
+ writeFullElement(xmlw, "citeReq", version.getCitationRequirements());
+ writeFullElement(xmlw, "deposReq", version.getDepositorRequirements());
+ writeFullElement(xmlw, "dataAccs", version.getTermsOfAccess());
+ writeFullElement(xmlw, "accsPlac", version.getDataAccessPlace());
+ writeFullElement(xmlw, "conditions", version.getConditions());
+ writeFullElement(xmlw, "disclaimer", version.getDisclaimer());
+ writeFullElement(xmlw, "origArch", version.getOriginalArchive());
+ writeFullElement(xmlw, "avlStatus", version.getAvailabilityStatus());
+ writeFullElement(xmlw, "contact", version.getContactForAccess());
+ writeFullElement(xmlw, "collSize", version.getSizeOfCollection());
+ writeFullElement(xmlw, "complete", version.getStudyCompletion());
+
+
+ xmlw.writeEndElement(); // stdyDscr
+ }
+
+ private static void writeDocDescElement (XMLStreamWriter xmlw, DatasetDTO datasetDto) throws XMLStreamException {
+ DatasetVersionDTO version = datasetDto.getDatasetVersion();
+ String persistentProtocol = datasetDto.getProtocol();
+ String persistentAgency = persistentProtocol;
+ // The "persistentAgency" tag is used for the "agency" attribute of the
+ // <IDNo> ddi section; back in the DVN3 days we used "handle" and "DOI"
+ // for the 2 supported protocols, respectively. For the sake of backward
+ // compatibility, we should probably stick with these labels: (-- L.A. 4.5)
+ if ("hdl".equals(persistentAgency)) {
+ persistentAgency = "handle";
+ } else if ("doi".equals(persistentAgency)) {
+ persistentAgency = "DOI";
+ }
+
+ String persistentAuthority = datasetDto.getAuthority();
+ String persistentId = datasetDto.getIdentifier();
+
+ xmlw.writeStartElement("docDscr");
+ xmlw.writeStartElement("citation");
+ xmlw.writeStartElement("titlStmt");
+ writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title));
+ xmlw.writeStartElement("IDNo");
+ writeAttribute(xmlw, "agency", persistentAgency);
+ xmlw.writeCharacters(persistentProtocol + ":" + persistentAuthority + "/" + persistentId);
+ xmlw.writeEndElement(); // IDNo
+ xmlw.writeEndElement(); // titlStmt
+ xmlw.writeStartElement("distStmt");
+ writeFullElement(xmlw, "distrbtr", datasetDto.getPublisher());
+ writeFullElement(xmlw, "distDate", datasetDto.getPublicationDate());
+
+ xmlw.writeEndElement(); // diststmt
+ writeVersionStatement(xmlw, version);
xmlw.writeStartElement("biblCit");
- xmlw.writeCharacters(citation);
+ xmlw.writeCharacters(version.getCitation());
xmlw.writeEndElement(); // biblCit
- xmlw.writeEndElement(); // citation
- xmlw.writeEndElement(); // stdyDscr
-
+ xmlw.writeEndElement(); // citation
+ xmlw.writeEndElement(); // docDscr
+
+ }
+
+ private static void writeVersionStatement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException{
+ xmlw.writeStartElement("verStmt");
+ writeAttribute(xmlw,"source","DVN");
+ xmlw.writeStartElement("version");
+ writeAttribute(xmlw,"date", datasetVersionDTO.getReleaseTime().substring(0, 10));
+ writeAttribute(xmlw,"type", datasetVersionDTO.getVersionState().toString());
+ xmlw.writeCharacters(datasetVersionDTO.getVersionNumber().toString());
+ xmlw.writeEndElement(); // version
+ xmlw.writeEndElement(); // verStmt
}
+
+ private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ xmlw.writeStartElement("sumDscr");
+ for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ Integer per = 0;
+ Integer coll = 0;
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.timePeriodCovered.equals(fieldDTO.getTypeName())) {
+ String dateValStart = "";
+ String dateValEnd = "";
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ per++;
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) {
+ dateValStart = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) {
+ dateValEnd = next.getSinglePrimitive();
+ }
+ }
+ if (!dateValStart.isEmpty()) {
+ writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "start", dateValStart );
+ }
+ if (!dateValEnd.isEmpty()) {
+ writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "end", dateValEnd );
+ }
+ }
+ }
+ if (DatasetFieldConstant.dateOfCollection.equals(fieldDTO.getTypeName())) {
+ String dateValStart = "";
+ String dateValEnd = "";
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ coll++;
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) {
+ dateValStart = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) {
+ dateValEnd = next.getSinglePrimitive();
+ }
+ }
+ if (!dateValStart.isEmpty()) {
+ writeDateElement(xmlw, "collDate", "P"+ coll.toString(), "start", dateValStart );
+ }
+ if (!dateValEnd.isEmpty()) {
+ writeDateElement(xmlw, "collDate", "P"+ coll.toString(), "end", dateValEnd );
+ }
+ }
+ }
+ if (DatasetFieldConstant.kindOfData.equals(fieldDTO.getTypeName())) {
+ writeMultipleElement(xmlw, "dataKind", fieldDTO);
+ }
+ }
+ }
+
+ if("geospatial".equals(key)){
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.geographicCoverage.equals(fieldDTO.getTypeName())) {
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.country.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "nation", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.city.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "geogCover", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.state.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "geogCover", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "geogCover", next.getSinglePrimitive());
+ }
+ }
+ }
+ }
+ if (DatasetFieldConstant.geographicBoundingBox.equals(fieldDTO.getTypeName())) {
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "westBL", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "eastBL", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "northBL", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) {
+ writeFullElement(xmlw, "southBL", next.getSinglePrimitive());
+ }
- /**
- * @todo Create a full dataDscr and otherMat sections of the DDI. This stub
- * adapted from the minimal DDIExportServiceBean example.
- */
- private static void createdataDscr(XMLStreamWriter xmlw, List fileDtos) throws XMLStreamException {
- if (fileDtos.isEmpty()) {
- return;
+ }
+ }
+ }
+ }
+ writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit));
+ }
+
+ if("socialscience".equals(key)){
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.universe.equals(fieldDTO.getTypeName())) {
+ writeMultipleElement(xmlw, "universe", fieldDTO);
+ }
+ if (DatasetFieldConstant.unitOfAnalysis.equals(fieldDTO.getTypeName())) {
+ writeMultipleElement(xmlw, "anlyUnit", fieldDTO);
+ }
+ }
+ }
}
- xmlw.writeStartElement("dataDscr");
- xmlw.writeEndElement(); // dataDscr
- for (FileDTO fileDTo : fileDtos) {
- xmlw.writeStartElement("otherMat");
- writeAttribute(xmlw, "ID", "f" + fileDTo.getDatafile().getId());
- writeAttribute(xmlw, "level", "datafile");
- xmlw.writeStartElement("labl");
- xmlw.writeCharacters(fileDTo.getDatafile().getName());
- xmlw.writeEndElement(); // labl
- writeFileDescription(xmlw, fileDTo);
- xmlw.writeEndElement(); // otherMat
+ xmlw.writeEndElement(); //sumDscr
+ }
+
+ private static void writeMultipleElement(XMLStreamWriter xmlw, String element, FieldDTO fieldDTO) throws XMLStreamException {
+ for (String value : fieldDTO.getMultiplePrimitive()) {
+ writeFullElement(xmlw, element, value);
}
}
+
+ private static void writeDateElement(XMLStreamWriter xmlw, String element, String cycle, String event, String dateIn) throws XMLStreamException {
+
+ xmlw.writeStartElement(element);
+ writeAttribute(xmlw, "cycle", cycle);
+ writeAttribute(xmlw, "event", event);
+ writeAttribute(xmlw, "date", dateIn);
+ xmlw.writeCharacters(dateIn);
+ xmlw.writeEndElement();
- private static void writeFileDescription(XMLStreamWriter xmlw, FileDTO fileDTo) throws XMLStreamException {
- xmlw.writeStartElement("txt");
- String description = fileDTo.getDatafile().getDescription();
- if (description != null) {
- xmlw.writeCharacters(description);
- }
- xmlw.writeEndElement(); // txt
}
+
+ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO version) throws XMLStreamException{
+ xmlw.writeStartElement("method");
+ xmlw.writeStartElement("dataColl");
+ writeFullElement(xmlw, "timeMeth", dto2Primitive(version, DatasetFieldConstant.timeMethod));
+ writeFullElement(xmlw, "dataCollector", dto2Primitive(version, DatasetFieldConstant.dataCollector));
+ writeFullElement(xmlw, "collectorTraining", dto2Primitive(version, DatasetFieldConstant.collectorTraining));
+ writeFullElement(xmlw, "frequenc", dto2Primitive(version, DatasetFieldConstant.frequencyOfDataCollection));
+ writeFullElement(xmlw, "sampProc", dto2Primitive(version, DatasetFieldConstant.samplingProcedure));
+ writeTargetSampleElement(xmlw, version);
+ writeFullElement(xmlw, "deviat", dto2Primitive(version, DatasetFieldConstant.deviationsFromSampleDesign));
+ writeFullElement(xmlw, "collMode", dto2Primitive(version, DatasetFieldConstant.collectionMode));
+ writeFullElement(xmlw, "resInstru", dto2Primitive(version, DatasetFieldConstant.researchInstrument));
+ writeFullElement(xmlw, "collSitu", dto2Primitive(version, DatasetFieldConstant.dataCollectionSituation));
+ writeFullElement(xmlw, "actMin", dto2Primitive(version, DatasetFieldConstant.actionsToMinimizeLoss));
+ writeFullElement(xmlw, "conOps", dto2Primitive(version, DatasetFieldConstant.controlOperations));
+ writeFullElement(xmlw, "weight", dto2Primitive(version, DatasetFieldConstant.weighting));
+ writeFullElement(xmlw, "cleanOps", dto2Primitive(version, DatasetFieldConstant.cleaningOperations));
- private static String dto2title(DatasetVersionDTO datasetVersionDTO) {
+ xmlw.writeEndElement(); //dataColl
+ xmlw.writeStartElement("anlyInfo");
+ writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes));
+ writeFullElement(xmlw, "respRate", dto2Primitive(version, DatasetFieldConstant.responseRate));
+ writeFullElement(xmlw, "estSmpErr", dto2Primitive(version, DatasetFieldConstant.samplingErrorEstimates));
+ writeFullElement(xmlw, "dataAppr", dto2Primitive(version, DatasetFieldConstant.otherDataAppraisal));
+ xmlw.writeEndElement(); //anlyInfo
+ writeNotesElement(xmlw, version);
+
+ xmlw.writeEndElement();//method
+ }
+
+ private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException{
+
+ //Key Words and Topic Classification
+
+ xmlw.writeStartElement("subject");
for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
String key = entry.getKey();
MetadataBlockDTO value = entry.getValue();
if ("citation".equals(key)) {
for (FieldDTO fieldDTO : value.getFields()) {
- if (DatasetFieldConstant.title.equals(fieldDTO.getTypeName())) {
- return fieldDTO.getSinglePrimitive();
+ if (DatasetFieldConstant.subject.equals(fieldDTO.getTypeName())){
+ for ( String subject : fieldDTO.getMultipleVocab()){
+ xmlw.writeStartElement("keyword");
+ xmlw.writeCharacters(subject);
+ xmlw.writeEndElement(); //Keyword
+ }
+ }
+
+ if (DatasetFieldConstant.keyword.equals(fieldDTO.getTypeName())) {
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ String keywordValue = "";
+ String keywordVocab = "";
+ String keywordURI = "";
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.keywordValue.equals(next.getTypeName())) {
+ keywordValue = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.keywordVocab.equals(next.getTypeName())) {
+ keywordVocab = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.keywordVocabURI.equals(next.getTypeName())) {
+ keywordURI = next.getSinglePrimitive();
+ }
+ }
+ if (!keywordValue.isEmpty()){
+ xmlw.writeStartElement("keyword");
+ if(!keywordVocab.isEmpty()){
+ writeAttribute(xmlw,"vocab",keywordVocab);
+ }
+ if(!keywordURI.isEmpty()){
+ writeAttribute(xmlw,"URI",keywordURI);
+ }
+ xmlw.writeCharacters(keywordValue);
+ xmlw.writeEndElement(); //Keyword
+ }
+
+ }
+ }
+ if (DatasetFieldConstant.topicClassification.equals(fieldDTO.getTypeName())) {
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ String topicClassificationValue = "";
+ String topicClassificationVocab = "";
+ String topicClassificationURI = "";
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.topicClassValue.equals(next.getTypeName())) {
+ topicClassificationValue = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.topicClassVocab.equals(next.getTypeName())) {
+ topicClassificationVocab = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.topicClassVocabURI.equals(next.getTypeName())) {
+ topicClassificationURI = next.getSinglePrimitive();
+ }
+ }
+ if (!topicClassificationValue.isEmpty()){
+ xmlw.writeStartElement("topcClas");
+ if(!topicClassificationVocab.isEmpty()){
+ writeAttribute(xmlw,"vocab",topicClassificationVocab);
+ }
+ if(!topicClassificationURI.isEmpty()){
+ writeAttribute(xmlw,"URI",topicClassificationURI);
+ }
+ xmlw.writeCharacters(topicClassificationValue);
+ xmlw.writeEndElement(); //topcClas
+ }
+ }
}
}
}
- }
- return null;
+ }
+ xmlw.writeEndElement(); // subject
}
+
+ private static void writeAuthorsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
- private static String dto2authors(DatasetVersionDTO datasetVersionDTO) {
for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
String key = entry.getKey();
MetadataBlockDTO value = entry.getValue();
if ("citation".equals(key)) {
for (FieldDTO fieldDTO : value.getFields()) {
if (DatasetFieldConstant.author.equals(fieldDTO.getTypeName())) {
+ xmlw.writeStartElement("rspStmt");
+ String authorName = "";
+ String authorAffiliation = "";
for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
FieldDTO next = iterator.next();
if (DatasetFieldConstant.authorName.equals(next.getTypeName())) {
- return next.getSinglePrimitive();
+ authorName = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.authorAffiliation.equals(next.getTypeName())) {
+ authorAffiliation = next.getSinglePrimitive();
+ }
+ }
+ if (!authorName.isEmpty()){
+ xmlw.writeStartElement("AuthEnty");
+ if(!authorAffiliation.isEmpty()){
+ writeAttribute(xmlw,"affiliation",authorAffiliation);
+ }
+ xmlw.writeCharacters(authorName);
+ xmlw.writeEndElement(); //AuthEnty
+ }
+ }
+ xmlw.writeEndElement(); //rspStmt
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeContactsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+
+ for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.datasetContact.equals(fieldDTO.getTypeName())) {
+ String datasetContactName = "";
+ String datasetContactAffiliation = "";
+ String datasetContactEmail = "";
+ for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.datasetContactName.equals(next.getTypeName())) {
+ datasetContactName = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.datasetContactAffiliation.equals(next.getTypeName())) {
+ datasetContactAffiliation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.datasetContactEmail.equals(next.getTypeName())) {
+ datasetContactEmail = next.getSinglePrimitive();
}
}
+ if (!datasetContactName.isEmpty()){
+ xmlw.writeStartElement("contact");
+ if(!datasetContactAffiliation.isEmpty()){
+ writeAttribute(xmlw,"affiliation",datasetContactAffiliation);
+ }
+ if(!datasetContactEmail.isEmpty()){
+ writeAttribute(xmlw,"email",datasetContactEmail);
+ }
+ xmlw.writeCharacters(datasetContactName);
+ xmlw.writeEndElement(); //contact
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDTO version) throws XMLStreamException {
+ xmlw.writeStartElement("prodStmt");
+ for (Map.Entry entry : version.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.producer.equals(fieldDTO.getTypeName())) {
+
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ String producerName = "";
+ String producerAffiliation = "";
+ String producerAbbreviation = "";
+ String producerLogo = "";
+ String producerURL = "";
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.producerName.equals(next.getTypeName())) {
+ producerName = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.producerAffiliation.equals(next.getTypeName())) {
+ producerAffiliation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.producerAbbreviation.equals(next.getTypeName())) {
+ producerAbbreviation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.producerLogo.equals(next.getTypeName())) {
+ producerLogo = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.producerURL.equals(next.getTypeName())) {
+ producerURL = next.getSinglePrimitive();
+
+ }
+ }
+ if (!producerName.isEmpty()) {
+ xmlw.writeStartElement("producer");
+ if (!producerAffiliation.isEmpty()) {
+ writeAttribute(xmlw, "affiliation", producerAffiliation);
+ }
+ if (!producerAbbreviation.isEmpty()) {
+ writeAttribute(xmlw, "abbr", producerAbbreviation);
+ }
+ if (!producerLogo.isEmpty()) {
+ writeAttribute(xmlw, "role", producerLogo);
+ }
+ if (!producerURL.isEmpty()) {
+ writeAttribute(xmlw, "URI", producerURL);
+ }
+ xmlw.writeCharacters(producerName);
+ xmlw.writeEndElement(); //producer
+ }
}
+
}
}
}
}
+ writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate));
+ writeFullElement(xmlw, "prodPlac", dto2Primitive(version, DatasetFieldConstant.productionPlace));
+
+ writeGrantElement(xmlw, version);
+ xmlw.writeEndElement(); //prodStmt
+ }
+
+ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.distributor.equals(fieldDTO.getTypeName())) {
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ String distributorName = "";
+ String distributorAffiliation = "";
+ String distributorAbbreviation = "";
+ String distributorURL = "";
+ String distributorLogoURL = "";
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.distributorName.equals(next.getTypeName())) {
+ distributorName = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.distributorAffiliation.equals(next.getTypeName())) {
+ distributorAffiliation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.distributorAbbreviation.equals(next.getTypeName())) {
+ distributorAbbreviation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.distributorURL.equals(next.getTypeName())) {
+ distributorURL = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.distributorLogo.equals(next.getTypeName())) {
+ distributorLogoURL = next.getSinglePrimitive();
+ }
+ }
+ if (!distributorName.isEmpty()) {
+ xmlw.writeStartElement("distrbtr");
+ if (!distributorAffiliation.isEmpty()) {
+ writeAttribute(xmlw, "affiliation", distributorAffiliation);
+ }
+ if (!distributorAbbreviation.isEmpty()) {
+ writeAttribute(xmlw, "abbr", distributorAbbreviation);
+ }
+ if (!distributorURL.isEmpty()) {
+ writeAttribute(xmlw, "URI", distributorURL);
+ }
+ if (!distributorLogoURL.isEmpty()) {
+ writeAttribute(xmlw, "role", distributorLogoURL);
+ }
+ xmlw.writeCharacters(distributorName);
+ xmlw.writeEndElement(); //distrbtr
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeRelPublElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.publication.equals(fieldDTO.getTypeName())) {
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ String pubString = "";
+ String citation = "";
+ String IDType = "";
+ String IDNo = "";
+ String url = "";
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.publicationCitation.equals(next.getTypeName())) {
+ citation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.publicationIDType.equals(next.getTypeName())) {
+ IDType = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.publicationIDNumber.equals(next.getTypeName())) {
+ IDNo = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.publicationURL.equals(next.getTypeName())) {
+ url = next.getSinglePrimitive();
+ }
+ }
+ pubString = appendCommaSeparatedValue(citation, IDType);
+ pubString = appendCommaSeparatedValue(pubString, IDNo);
+ pubString = appendCommaSeparatedValue(pubString, url);
+ if (!pubString.isEmpty()){
+ xmlw.writeStartElement("relPubl");
+ xmlw.writeCharacters(pubString);
+ xmlw.writeEndElement(); //relPubl
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static String appendCommaSeparatedValue(String inVal, String next) {
+ if (!next.isEmpty()) {
+ if (!inVal.isEmpty()) {
+ return inVal + ", " + next;
+ } else {
+ return next;
+ }
+ }
+ return inVal;
+ }
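+
+ // For example (values are illustrative only): appendCommaSeparatedValue("Smith, J. (2015)", "doi")
+ // yields "Smith, J. (2015), doi", while appending an empty string returns the input unchanged;
+ // this is how the citation, ID type, ID number and URL above collapse into one string.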
+
+ private static void writeAbstractElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.description.equals(fieldDTO.getTypeName())) {
+ String descriptionText = "";
+ String descriptionDate = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.descriptionText.equals(next.getTypeName())) {
+ descriptionText = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.descriptionDate.equals(next.getTypeName())) {
+ descriptionDate = next.getSinglePrimitive();
+ }
+ }
+ if (!descriptionText.isEmpty()){
+ xmlw.writeStartElement("abstract");
+ if(!descriptionDate.isEmpty()){
+ writeAttribute(xmlw,"date",descriptionDate);
+ }
+ xmlw.writeCharacters(descriptionText);
+ xmlw.writeEndElement(); //abstract
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeGrantElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.grantNumber.equals(fieldDTO.getTypeName())) {
+ String grantNumber = "";
+ String grantAgency = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.grantNumberValue.equals(next.getTypeName())) {
+ grantNumber = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.grantNumberAgency.equals(next.getTypeName())) {
+ grantAgency = next.getSinglePrimitive();
+ }
+ }
+ if (!grantNumber.isEmpty()){
+ xmlw.writeStartElement("grantNo");
+ if(!grantAgency.isEmpty()){
+ writeAttribute(xmlw,"agency",grantAgency);
+ }
+ xmlw.writeCharacters(grantNumber);
+ xmlw.writeEndElement(); //grantno
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeOtherIdElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.otherId.equals(fieldDTO.getTypeName())) {
+ String otherId = "";
+ String otherIdAgency = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.otherIdValue.equals(next.getTypeName())) {
+ otherId = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.otherIdAgency.equals(next.getTypeName())) {
+ otherIdAgency = next.getSinglePrimitive();
+ }
+ }
+ if (!otherId.isEmpty()){
+ xmlw.writeStartElement("IDNo");
+ if(!otherIdAgency.isEmpty()){
+ writeAttribute(xmlw,"agency",otherIdAgency);
+ }
+ xmlw.writeCharacters(otherId);
+ xmlw.writeEndElement(); //IDNo
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeSoftwareElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.software.equals(fieldDTO.getTypeName())) {
+ String softwareName = "";
+ String softwareVersion = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.softwareName.equals(next.getTypeName())) {
+ softwareName = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.softwareVersion.equals(next.getTypeName())) {
+ softwareVersion = next.getSinglePrimitive();
+ }
+ }
+ if (!softwareName.isEmpty()){
+ xmlw.writeStartElement("software");
+ if(!softwareVersion.isEmpty()){
+ writeAttribute(xmlw,"version",softwareVersion);
+ }
+ xmlw.writeCharacters(softwareName);
+ xmlw.writeEndElement(); //software
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeSeriesElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) {
+ xmlw.writeStartElement("serStmt");
+ String seriesName = "";
+ String seriesInformation = "";
+ Set foo = fieldDTO.getSingleCompound();
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) {
+ seriesName = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) {
+ seriesInformation = next.getSinglePrimitive();
+ }
+ }
+ if (!seriesName.isEmpty()){
+ xmlw.writeStartElement("serName");
+ xmlw.writeCharacters(seriesName);
+ xmlw.writeEndElement(); //serName
+ }
+ if (!seriesInformation.isEmpty()){
+ xmlw.writeStartElement("serInfo");
+ xmlw.writeCharacters(seriesInformation);
+ xmlw.writeEndElement(); //serInfo
+ }
+ xmlw.writeEndElement(); //serStmt
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeTargetSampleElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("socialscience".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.targetSampleSize.equals(fieldDTO.getTypeName())) {
+ String sizeFormula = "";
+ String actualSize = "";
+ Set foo = fieldDTO.getSingleCompound();
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.targetSampleSizeFormula.equals(next.getTypeName())) {
+ sizeFormula = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.targetSampleActualSize.equals(next.getTypeName())) {
+ actualSize = next.getSinglePrimitive();
+ }
+ }
+ if (!sizeFormula.isEmpty()) {
+ xmlw.writeStartElement("sampleSizeFormula");
+ xmlw.writeCharacters(sizeFormula);
+ xmlw.writeEndElement(); //sampleSizeFormula
+ }
+ if (!actualSize.isEmpty()) {
+ xmlw.writeStartElement("sampleSize");
+ xmlw.writeCharacters(actualSize);
+ xmlw.writeEndElement(); //sampleSize
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeNotesElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("socialscience".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.socialScienceNotes.equals(fieldDTO.getTypeName())) {
+ String notesText = "";
+ String notesType = "";
+ String notesSubject= "";
+ Set foo = fieldDTO.getSingleCompound();
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.socialScienceNotesText.equals(next.getTypeName())) {
+ notesText = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.socialScienceNotesType.equals(next.getTypeName())) {
+ notesType = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.socialScienceNotesSubject.equals(next.getTypeName())) {
+ notesSubject = next.getSinglePrimitive();
+ }
+ }
+ if (!notesText.isEmpty()) {
+ xmlw.writeStartElement("notes");
+ if(!notesType.isEmpty()){
+ writeAttribute(xmlw,"type",notesType);
+ }
+ if(!notesSubject.isEmpty()){
+ writeAttribute(xmlw,"subject",notesSubject);
+ }
+ xmlw.writeCharacters(notesText);
+ xmlw.writeEndElement();
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // TODO:
+ // see if there's more information that we could encode in this otherMat.
+ // contentType? UNFs and such? (In the "short" DDI that is used for
+ // harvesting, *all* files are encoded as otherMats, even tabular ones.)
+ private static void createOtherMats(XMLStreamWriter xmlw, List fileDtos) throws XMLStreamException {
+ // The preferred URL for this dataverse, for cooking up the file access API links:
+ String dataverseUrl = getDataverseSiteUrl();
+
+ for (FileDTO fileDTo : fileDtos) {
+ // We'll continue using the scheme we've used before, in DVN2-3: non-tabular files are put into otherMat,
+ // tabular ones - in fileDscr sections. (fileDscr sections have special fields for numbers of variables
+ // and observations, etc.)
+ if (fileDTo.getDataFile().getDataTables() == null || fileDTo.getDataFile().getDataTables().isEmpty()) {
+ xmlw.writeStartElement("otherMat");
+ writeAttribute(xmlw, "ID", "f" + fileDTo.getDataFile().getId());
+ writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileDTo.getDataFile().getId());
+ writeAttribute(xmlw, "level", "datafile");
+ xmlw.writeStartElement("labl");
+ xmlw.writeCharacters(fileDTo.getDataFile().getFilename());
+ xmlw.writeEndElement(); // labl
+ writeFileDescription(xmlw, fileDTo);
+ // there's no readily available field in the othermat section
+ // for the content type (aka mime type); so we'll store it in this
+ // specially formatted notes section:
+ String contentType = fileDTo.getDataFile().getContentType();
+ if (!StringUtilisEmpty(contentType)) {
+ xmlw.writeStartElement("notes");
+ writeAttribute(xmlw, "level", LEVEL_FILE);
+ writeAttribute(xmlw, "type", NOTE_TYPE_CONTENTTYPE);
+ writeAttribute(xmlw, "subject", NOTE_SUBJECT_CONTENTTYPE);
+ xmlw.writeCharacters(contentType);
+ xmlw.writeEndElement(); // notes
+ }
+ xmlw.writeEndElement(); // otherMat
+ }
+ }
+ }
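+
+ // A rough sketch of what the method above emits for a single non-tabular file; the identifiers,
+ // URLs and the LEVEL_FILE / NOTE_TYPE_CONTENTTYPE / NOTE_SUBJECT_CONTENTTYPE values shown here
+ // are illustrative placeholders, not the actual constants:
+ //
+ //   <otherMat ID="f123" URI="https://dataverse.example.edu/api/access/datafile/123" level="datafile">
+ //     <labl>codebook.pdf</labl>
+ //     <txt>Survey codebook for the 2015 wave</txt>
+ //     <notes level="..." type="..." subject="...">application/pdf</notes>
+ //   </otherMat>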
+
+ // An alternative version of the createOtherMats method - this one is used
+ // when a "full" DDI is being cooked; just like the fileDscr and data/var sections methods,
+ // it operates on the list of FileMetadata entities, not on File DTOs. This is because
+ // DTOs do not support "tabular", variable-level metadata yet. And we need to be able to
+ // tell if this file is in fact tabular data - so that we know if it needs an
+ // otherMat, or a fileDscr section.
+ // -- L.A. 4.5
+
+ private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, List fileMetadatas) throws XMLStreamException {
+ // The preferred URL for this dataverse, for cooking up the file access API links:
+ String dataverseUrl = getDataverseSiteUrl();
+
+ for (FileMetadata fileMetadata : fileMetadatas) {
+ // We'll continue using the scheme we've used before, in DVN2-3: non-tabular files are put into otherMat,
+ // tabular ones - in fileDscr sections. (fileDscr sections have special fields for numbers of variables
+ // and observations, etc.)
+ if (fileMetadata.getDataFile() != null && !fileMetadata.getDataFile().isTabularData()) {
+ xmlw.writeStartElement("otherMat");
+ writeAttribute(xmlw, "ID", "f" + fileMetadata.getDataFile().getId());
+ writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileMetadata.getDataFile().getId());
+ writeAttribute(xmlw, "level", "datafile");
+ xmlw.writeStartElement("labl");
+ xmlw.writeCharacters(fileMetadata.getLabel());
+ xmlw.writeEndElement(); // labl
+
+ String description = fileMetadata.getDescription();
+ if (description != null) {
+ xmlw.writeStartElement("txt");
+ xmlw.writeCharacters(description);
+ xmlw.writeEndElement(); // txt
+ }
+ // there's no readily available field in the othermat section
+ // for the content type (aka mime type); so we'll store it in this
+ // specially formatted notes section:
+ String contentType = fileMetadata.getDataFile().getContentType();
+ if (!StringUtilisEmpty(contentType)) {
+ xmlw.writeStartElement("notes");
+ writeAttribute(xmlw, "level", LEVEL_FILE);
+ writeAttribute(xmlw, "type", NOTE_TYPE_CONTENTTYPE);
+ writeAttribute(xmlw, "subject", NOTE_SUBJECT_CONTENTTYPE);
+ xmlw.writeCharacters(contentType);
+ xmlw.writeEndElement(); // notes
+ }
+ xmlw.writeEndElement(); // otherMat
+ }
+ }
+ }
+
+ private static void writeFileDescription(XMLStreamWriter xmlw, FileDTO fileDTo) throws XMLStreamException {
+ xmlw.writeStartElement("txt");
+ String description = fileDTo.getDataFile().getDescription();
+ if (description != null) {
+ xmlw.writeCharacters(description);
+ }
+ xmlw.writeEndElement(); // txt
+ }
+
+ private static String dto2Primitive(DatasetVersionDTO datasetVersionDTO, String datasetFieldTypeName) {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ MetadataBlockDTO value = entry.getValue();
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) {
+ return fieldDTO.getSinglePrimitive();
+ }
+ }
+ }
+ return null;
+ }
+
+ private static List dto2PrimitiveList(DatasetVersionDTO datasetVersionDTO, String datasetFieldTypeName) {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ MetadataBlockDTO value = entry.getValue();
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) {
+ return fieldDTO.getMultiplePrimitive();
+ }
+ }
+ }
return null;
}
+ private static void writeFullElementList(XMLStreamWriter xmlw, String name, List values) throws XMLStreamException {
+ // For the simplest elements: write a start tag, the value, and an end tag for each item in the list.
+ if (values != null && !values.isEmpty()) {
+ for (String value : values) {
+ xmlw.writeStartElement(name);
+ xmlw.writeCharacters(value);
+ xmlw.writeEndElement(); // close the element given by "name"
+ }
+ }
+ }
+
+ private static void writeFullElement (XMLStreamWriter xmlw, String name, String value) throws XMLStreamException {
+ // For the simplest elements: write a start tag, the value, and an end tag.
+ if (!StringUtilisEmpty(value)) {
+ xmlw.writeStartElement(name);
+ xmlw.writeCharacters(value);
+ xmlw.writeEndElement(); // close the element given by "name"
+ }
+ }
+
private static void writeAttribute(XMLStreamWriter xmlw, String name, String value) throws XMLStreamException {
if (!StringUtilisEmpty(value)) {
xmlw.writeAttribute(name, value);
@@ -187,5 +1186,321 @@ private static boolean StringUtilisEmpty(String str) {
private static void saveJsonToDisk(String datasetVersionAsJson) throws IOException {
Files.write(Paths.get("/tmp/out.json"), datasetVersionAsJson.getBytes());
}
+
+ /**
+ * The "official", designated URL of the site;
+ * it can be defined as a complete URL, or derived from the
+ * "official" hostname. If neither is set, it
+ * defaults to InetAddress.getLocalHost() and https.
+ */
+ private static String getDataverseSiteUrl() {
+ String hostUrl = System.getProperty(SITE_URL);
+ if (hostUrl != null && !"".equals(hostUrl)) {
+ return hostUrl;
+ }
+ String hostName = System.getProperty(FQDN);
+ if (hostName == null) {
+ try {
+ hostName = InetAddress.getLocalHost().getCanonicalHostName();
+ } catch (UnknownHostException e) {
+ hostName = null;
+ }
+ }
+
+ if (hostName != null) {
+ return "https://" + hostName;
+ }
+
+ return "http://localhost:8080";
+ }
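+
+ // In practice (hypothetical values; the actual property names are whatever the SITE_URL and
+ // FQDN constants resolve to): if the site-URL property is set to "https://data.example.edu",
+ // that string is returned as-is; otherwise an https URL is built from the configured or
+ // auto-detected hostname; only when no hostname can be determined at all does the method
+ // fall back to "http://localhost:8080".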
+
+
+
+
+ // Methods specific to the tabular data ("dataDscr") section.
+ // Note that these do NOT operate on DTO objects, but instead directly
+ // on Dataverse DataVariable, DataTable, etc. objects.
+ // This is because for this release (4.5) we are recycling the already available
+ // code, and this is what we got. (We already have DTO objects for DataTable,
+ // and DataVariable, etc., but the current version JsonPrinter.jsonAsDatasetDto()
+ // does not produce JSON for these objects - it stops at DataFile. Eventually
+ // we want all of our objects to be exportable as JSON, and then all the exports
+ // can go through the same DTO state... But we don't have time for it now;
+ // plus, the structure of file-level metadata is currently being re-designed,
+ // so we probably should not invest any time into it right now). -- L.A. 4.5
+
+ private static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
+
+ if (datasetVersion.getFileMetadatas() == null || datasetVersion.getFileMetadatas().isEmpty()) {
+ return;
+ }
+
+ boolean tabularData = false;
+
+ // we're not writing the opening tag until we find an actual
+ // tabular datafile.
+ for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
+ DataFile dataFile = fileMetadata.getDataFile();
+
+ if (dataFile != null && dataFile.isTabularData()) {
+ if (!tabularData) {
+ xmlw.writeStartElement("dataDscr");
+ tabularData = true;
+ }
+
+ List vars = dataFile.getDataTable().getDataVariables();
+
+ for (DataVariable var : vars) {
+ createVarDDI(xmlw, var);
+ }
+ }
+ }
+
+ if (tabularData) {
+ xmlw.writeEndElement(); // dataDscr
+ }
+ }
+
+ private static void createVarDDI(XMLStreamWriter xmlw, DataVariable dv) throws XMLStreamException {
+ xmlw.writeStartElement("var");
+ writeAttribute(xmlw, "ID", "v" + dv.getId().toString());
+ writeAttribute(xmlw, "name", dv.getName());
+
+ if (dv.getNumberOfDecimalPoints() != null) {
+ writeAttribute(xmlw, "dcml", dv.getNumberOfDecimalPoints().toString());
+ }
+
+ if (dv.isOrderedCategorical()) {
+ writeAttribute(xmlw, "nature", "ordinal");
+ }
+
+ if (dv.getInterval() != null) {
+ String interval = dv.getIntervalLabel();
+ if (interval != null) {
+ writeAttribute(xmlw, "intrvl", interval);
+ }
+ }
+
+ // location
+ xmlw.writeEmptyElement("location");
+ if (dv.getFileStartPosition() != null) {
+ writeAttribute(xmlw, "StartPos", dv.getFileStartPosition().toString());
+ }
+ if (dv.getFileEndPosition() != null) {
+ writeAttribute(xmlw, "EndPos", dv.getFileEndPosition().toString());
+ }
+ if (dv.getRecordSegmentNumber() != null) {
+ writeAttribute(xmlw, "RecSegNo", dv.getRecordSegmentNumber().toString());
+ }
+
+ writeAttribute(xmlw, "fileid", "f" + dv.getDataTable().getDataFile().getId().toString());
+
+ // labl
+ if (!StringUtilisEmpty(dv.getLabel())) {
+ xmlw.writeStartElement("labl");
+ writeAttribute(xmlw, "level", "variable");
+ xmlw.writeCharacters(dv.getLabel());
+ xmlw.writeEndElement(); //labl
+ }
+
+ // invalrng
+ boolean invalrngAdded = false;
+ for (VariableRange range : dv.getInvalidRanges()) {
+ //if (range.getBeginValueType() != null && range.getBeginValueType().getName().equals(DB_VAR_RANGE_TYPE_POINT)) {
+ if (range.getBeginValueType() != null && range.isBeginValueTypePoint()) {
+ if (range.getBeginValue() != null) {
+ invalrngAdded = checkParentElement(xmlw, "invalrng", invalrngAdded);
+ xmlw.writeEmptyElement("item");
+ writeAttribute(xmlw, "VALUE", range.getBeginValue());
+ }
+ } else {
+ invalrngAdded = checkParentElement(xmlw, "invalrng", invalrngAdded);
+ xmlw.writeEmptyElement("range");
+ if (range.getBeginValueType() != null && range.getBeginValue() != null) {
+ if (range.isBeginValueTypeMin()) {
+ writeAttribute(xmlw, "min", range.getBeginValue());
+ } else if (range.isBeginValueTypeMinExcl()) {
+ writeAttribute(xmlw, "minExclusive", range.getBeginValue());
+ }
+ }
+ if (range.getEndValueType() != null && range.getEndValue() != null) {
+ if (range.isEndValueTypeMax()) {
+ writeAttribute(xmlw, "max", range.getEndValue());
+ } else if (range.isEndValueTypeMaxExcl()) {
+ writeAttribute(xmlw, "maxExclusive", range.getEndValue());
+ }
+ }
+ }
+ }
+ if (invalrngAdded) {
+ xmlw.writeEndElement(); // invalrng
+ }
+
+ //universe
+ if (!StringUtilisEmpty(dv.getUniverse())) {
+ xmlw.writeStartElement("universe");
+ xmlw.writeCharacters(dv.getUniverse());
+ xmlw.writeEndElement(); //universe
+ }
+
+ //sum stats
+ for (SummaryStatistic sumStat : dv.getSummaryStatistics()) {
+ xmlw.writeStartElement("sumStat");
+ if (sumStat.getTypeLabel() != null) {
+ writeAttribute(xmlw, "type", sumStat.getTypeLabel());
+ } else {
+ writeAttribute(xmlw, "type", "unknown");
+ }
+ xmlw.writeCharacters(sumStat.getValue());
+ xmlw.writeEndElement(); //sumStat
+ }
+
+ // categories
+ for (VariableCategory cat : dv.getCategories()) {
+ xmlw.writeStartElement("catgry");
+ if (cat.isMissing()) {
+ writeAttribute(xmlw, "missing", "Y");
+ }
+
+ // catValu
+ xmlw.writeStartElement("catValu");
+ xmlw.writeCharacters(cat.getValue());
+ xmlw.writeEndElement(); //catValu
+
+ // label
+ if (!StringUtilisEmpty(cat.getLabel())) {
+ xmlw.writeStartElement("labl");
+ writeAttribute(xmlw, "level", "category");
+ xmlw.writeCharacters(cat.getLabel());
+ xmlw.writeEndElement(); //labl
+ }
+
+ // catStat
+ if (cat.getFrequency() != null) {
+ xmlw.writeStartElement("catStat");
+ writeAttribute(xmlw, "type", "freq");
+ // if frequency is actually a long value, we want to write "100" instead of "100.0"
+ if (Math.floor(cat.getFrequency()) == cat.getFrequency()) {
+ xmlw.writeCharacters(Long.toString(cat.getFrequency().longValue()));
+ } else {
+ xmlw.writeCharacters(cat.getFrequency().toString());
+ }
+ xmlw.writeEndElement(); //catStat
+ }
+
+ xmlw.writeEndElement(); //catgry
+ }
+
+ // varFormat
+ xmlw.writeEmptyElement("varFormat");
+ if (dv.isTypeNumeric()) {
+ writeAttribute(xmlw, "type", "numeric");
+ } else if (dv.isTypeCharacter()) {
+ writeAttribute(xmlw, "type", "character");
+ } else {
+ throw new XMLStreamException("Illegal Variable Format Type!");
+ }
+ writeAttribute(xmlw, "formatname", dv.getFormat());
+ //experiment writeAttribute(xmlw, "schema", dv.getFormatSchema());
+ writeAttribute(xmlw, "category", dv.getFormatCategory());
+
+ // notes
+ if (dv.getUnf() != null && !"".equals(dv.getUnf())) {
+ xmlw.writeStartElement("notes");
+ writeAttribute(xmlw, "subject", "Universal Numeric Fingerprint");
+ writeAttribute(xmlw, "level", "variable");
+ writeAttribute(xmlw, "type", "Dataverse:UNF");
+ xmlw.writeCharacters(dv.getUnf());
+ xmlw.writeEndElement(); //notes
+ }
+
+ xmlw.writeEndElement(); //var
+
+ }
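+
+ // A sketch of the markup createVarDDI produces for one numeric variable; every value below is
+ // made up purely for illustration, and optional attributes (dcml, nature, formatname, etc.) are omitted:
+ //
+ //   <var ID="v42" name="age" intrvl="discrete">
+ //     <location StartPos="1" EndPos="3" fileid="f123"/>
+ //     <labl level="variable">Age of respondent</labl>
+ //     <sumStat type="mean">37.2</sumStat>
+ //     <catgry><catValu>99</catValu><labl level="category">Refused</labl></catgry>
+ //     <varFormat type="numeric"/>
+ //     <notes subject="Universal Numeric Fingerprint" level="variable" type="Dataverse:UNF">UNF:6:...</notes>
+ //   </var>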
+
+ private static void createFileDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
+ String dataverseUrl = getDataverseSiteUrl();
+ for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
+ DataFile dataFile = fileMetadata.getDataFile();
+
+ if (dataFile != null && dataFile.isTabularData()) {
+ DataTable dt = dataFile.getDataTable();
+ xmlw.writeStartElement("fileDscr");
+ writeAttribute(xmlw, "ID", "f" + dataFile.getId());
+ writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + dataFile.getId());
+
+ xmlw.writeStartElement("fileTxt");
+ xmlw.writeStartElement("fileName");
+ xmlw.writeCharacters(fileMetadata.getLabel());
+ xmlw.writeEndElement(); // fileName
+
+ if (dt.getCaseQuantity() != null || dt.getVarQuantity() != null || dt.getRecordsPerCase() != null) {
+ xmlw.writeStartElement("dimensns");
+
+ if (dt.getCaseQuantity() != null) {
+ xmlw.writeStartElement("caseQnty");
+ xmlw.writeCharacters(dt.getCaseQuantity().toString());
+ xmlw.writeEndElement(); // caseQnty
+ }
+
+ if (dt.getVarQuantity() != null) {
+ xmlw.writeStartElement("varQnty");
+ xmlw.writeCharacters(dt.getVarQuantity().toString());
+ xmlw.writeEndElement(); // varQnty
+ }
+
+ if (dt.getRecordsPerCase() != null) {
+ xmlw.writeStartElement("recPrCas");
+ xmlw.writeCharacters(dt.getRecordsPerCase().toString());
+ xmlw.writeEndElement(); // recPrCas
+ }
+
+ xmlw.writeEndElement(); // dimensns
+ }
+
+ xmlw.writeStartElement("fileType");
+ xmlw.writeCharacters(dataFile.getContentType());
+ xmlw.writeEndElement(); // fileType
+
+ xmlw.writeEndElement(); // fileTxt
+
+ // various notes:
+ // this specially formatted note section is used to store the UNF
+ // (Universal Numeric Fingerprint) signature:
+ if (dt.getUnf() != null && !dt.getUnf().equals("")) {
+ xmlw.writeStartElement("notes");
+ writeAttribute(xmlw, "level", LEVEL_FILE);
+ writeAttribute(xmlw, "type", NOTE_TYPE_UNF);
+ writeAttribute(xmlw, "subject", NOTE_SUBJECT_UNF);
+ xmlw.writeCharacters(dt.getUnf());
+ xmlw.writeEndElement(); // notes
+ }
+
+ if (dataFile.getTags() != null) {
+ for (int i = 0; i < dataFile.getTags().size(); i++) {
+ xmlw.writeStartElement("notes");
+ writeAttribute(xmlw, "level", LEVEL_FILE);
+ writeAttribute(xmlw, "type", NOTE_TYPE_TAG);
+ writeAttribute(xmlw, "subject", NOTE_SUBJECT_TAG);
+ xmlw.writeCharacters(dataFile.getTags().get(i).getTypeLabel());
+ xmlw.writeEndElement(); // notes
+ }
+ }
+
+ // TODO: add the remaining fileDscr elements!
+ xmlw.writeEndElement(); // fileDscr
+ }
+ }
+ }
+
+
+
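+ // Lazily opens the given parent element the first time a child element needs it; callers keep
+ // the returned flag and are responsible for closing the element themselves (see the invalrng
+ // handling in createVarDDI above for the intended usage pattern).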
+ private static boolean checkParentElement(XMLStreamWriter xmlw, String elementName, boolean elementAdded) throws XMLStreamException {
+ if (!elementAdded) {
+ xmlw.writeStartElement(elementName);
+ }
+
+ return true;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
new file mode 100644
index 00000000000..2ace4655d07
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
@@ -0,0 +1,458 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.export.dublincore;
+
+import com.google.gson.Gson;
+import edu.harvard.iq.dataverse.DatasetFieldConstant;
+import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
+import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
+import edu.harvard.iq.dataverse.api.dto.FieldDTO;
+import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
+import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.json.JsonObject;
+import javax.xml.stream.XMLOutputFactory;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamWriter;
+
+/**
+ *
+ * @author skraffmi
+ */
+public class DublinCoreExportUtil {
+
+ private static final Logger logger = Logger.getLogger(DublinCoreExportUtil.class.getCanonicalName());
+
+ public static String OAI_DC_XML_NAMESPACE = "http://www.openarchives.org/OAI/2.0/oai_dc/";
+ public static String OAI_DC_XML_SCHEMALOCATION = "http://www.openarchives.org/OAI/2.0/oai_dc.xsd";
+
+ public static String DC_XML_NAMESPACE = "http://purl.org/dc/elements/1.1/";
+
+ public static String DCTERMS_XML_NAMESPACE = "http://purl.org/dc/terms/";
+ public static String DCTERMS_DEFAULT_NAMESPACE="http://dublincore.org/documents/dcmi-terms/";
+ public static String DCTERMS_XML_SCHEMALOCATION="http://dublincore.org/schemas/xmls/qdc/dcterms.xsd";
+ public static String DEFAULT_XML_VERSION = "2.0";
+
+ public static String DC_FLAVOR_OAI = "dc";
+ public static String DC_FLAVOR_DCTERMS = "dcterms";
+
+ public static String DEFAULT_DC_FLAVOR = DC_FLAVOR_DCTERMS;
+
+
+ public static void datasetJson2dublincore(JsonObject datasetDtoAsJson, OutputStream outputStream, String dcFlavor) throws XMLStreamException {
+ logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson.toString()));
+ Gson gson = new Gson();
+ DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson.toString(), DatasetDTO.class);
+ //try {
+ dto2dublincore(datasetDto, outputStream, dcFlavor);
+ //} catch (XMLStreamException ex) {
+ // Logger.getLogger(DdiExportUtil.class.getName()).log(Level.SEVERE, null, ex);
+ //}
+ }
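+
+ // Hypothetical caller sketch (the variable names are assumptions, not part of this class):
+ //
+ //   ByteArrayOutputStream out = new ByteArrayOutputStream();
+ //   DublinCoreExportUtil.datasetJson2dublincore(datasetJson, out, DublinCoreExportUtil.DEFAULT_DC_FLAVOR);
+ //   String dcterms = out.toString();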
+
+ private static void dto2dublincore(DatasetDTO datasetDto, OutputStream outputStream, String dcFlavor) throws XMLStreamException {
+ XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream);
+ if (DC_FLAVOR_DCTERMS.equals(dcFlavor)) {
+ xmlw.writeStartElement("metadata");
+ xmlw.writeAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
+ xmlw.writeAttribute("xmlns:dc", DC_XML_NAMESPACE);
+ xmlw.writeAttribute("xmlns:dcterms", DCTERMS_XML_NAMESPACE);
+ xmlw.writeDefaultNamespace(DCTERMS_DEFAULT_NAMESPACE);
+ //xmlw.writeAttribute("xsi:schemaLocation", DCTERMS_DEFAULT_NAMESPACE+" "+DCTERMS_XML_SCHEMALOCATION);
+ } else if (DC_FLAVOR_OAI.equals(dcFlavor)) {
+ xmlw.writeStartElement("oai_dc:dc");
+ xmlw.writeAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
+ xmlw.writeAttribute("xmlns:oai_dc", OAI_DC_XML_NAMESPACE);
+ xmlw.writeAttribute("xmlns:dc", DC_XML_NAMESPACE);
+ xmlw.writeAttribute("xsi:schemaLocation", OAI_DC_XML_NAMESPACE+" "+OAI_DC_XML_SCHEMALOCATION);
+ writeAttribute(xmlw, "version", DEFAULT_XML_VERSION);
+ }
+
+ createDC(xmlw, datasetDto, dcFlavor);
+ xmlw.writeEndElement(); // closes "metadata" or "oai_dc:dc"
+ xmlw.flush();
+ }
+
+ //TODO:
+ // If the requested flavor is "OAI_DC" (the minimal, original 15 field format),
+ // we should NOT be exporting the extended, DCTERMS fields
+ // - such as, for example, "dateSubmitted" ... (4.5.1?)
+ // -- L.A.
+
+ private static void createDC(XMLStreamWriter xmlw, DatasetDTO datasetDto, String dcFlavor) throws XMLStreamException {
+ DatasetVersionDTO version = datasetDto.getDatasetVersion();
+ String persistentAgency = datasetDto.getProtocol();
+ String persistentAuthority = datasetDto.getAuthority();
+ String persistentId = datasetDto.getIdentifier();
+
+ writeFullElement(xmlw, dcFlavor+":"+"title", dto2Primitive(version, DatasetFieldConstant.title));
+
+ xmlw.writeStartElement(dcFlavor+":"+"identifier");
+ xmlw.writeCharacters(persistentAgency + ":" + persistentAuthority + "/" + persistentId);
+ xmlw.writeEndElement(); // dcterms:identifier
+
+ writeAuthorsElement(xmlw, version, dcFlavor);
+
+ writeFullElement(xmlw, dcFlavor+":"+"publisher", datasetDto.getPublisher());
+ writeFullElement(xmlw, dcFlavor+":"+"issued", datasetDto.getPublicationDate());
+
+ writeFullElement(xmlw, dcFlavor+":"+"modified", datasetDto.getDatasetVersion().getLastUpdateTime());
+ writeAbstractElement(xmlw, version, dcFlavor); // Description
+ writeSubjectElement(xmlw, version, dcFlavor); //Subjects and Key Words
+
+ writeFullElementList(xmlw, dcFlavor+":"+"language", dto2PrimitiveList(version, DatasetFieldConstant.language));
+
+ writeRelPublElement(xmlw, version, dcFlavor);
+ writeFullElement(xmlw, dcFlavor+":"+"date", dto2Primitive(version, DatasetFieldConstant.productionDate));
+
+ writeFullElement(xmlw, dcFlavor+":"+"contributor", dto2Primitive(version, DatasetFieldConstant.depositor));
+
+ writeContributorElement(xmlw, version, dcFlavor);
+ writeFullElement(xmlw, dcFlavor+":"+"dateSubmitted", dto2Primitive(version, DatasetFieldConstant.dateOfDeposit));
+
+ writeTimeElements(xmlw, version, dcFlavor);
+
+ writeFullElementList(xmlw, dcFlavor+":"+"relation", dto2PrimitiveList(version, DatasetFieldConstant.relatedDatasets));
+
+ writeFullElementList(xmlw, dcFlavor+":"+"type", dto2PrimitiveList(version, DatasetFieldConstant.kindOfData));
+
+ writeFullElementList(xmlw, dcFlavor+":"+"source", dto2PrimitiveList(version, DatasetFieldConstant.dataSources));
+
+ //Geo Elements
+ writeSpatialElements(xmlw, version, dcFlavor);
+
+ //License and Terms
+ writeFullElement(xmlw, dcFlavor+":"+"license", version.getLicense());
+ writeFullElement(xmlw, dcFlavor+":"+"rights", version.getTermsOfUse());
+ writeFullElement(xmlw, dcFlavor+":"+"rights", version.getRestrictions());
+
+ }
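+
+ // A rough sketch of the dcterms-flavored output this builds up (field values are illustrative,
+ // and only a few of the elements written above are shown):
+ //
+ //   <metadata xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" ...>
+ //     <dcterms:title>Example Study</dcterms:title>
+ //     <dcterms:identifier>doi:10.5072/FK2/EXAMPLE</dcterms:identifier>
+ //     <dcterms:creator>Smith, Jane</dcterms:creator>
+ //     <dcterms:description>A made-up description.</dcterms:description>
+ //     <dcterms:subject>Social Sciences</dcterms:subject>
+ //   </metadata>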
+
+ private static void writeAuthorsElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException {
+
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.author.equals(fieldDTO.getTypeName())) {
+ String authorName = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.authorName.equals(next.getTypeName())) {
+ authorName = next.getSinglePrimitive();
+ }
+ }
+ if (!authorName.isEmpty()) {
+ xmlw.writeStartElement(dcFlavor+":"+"creator");
+ xmlw.writeCharacters(authorName);
+ xmlw.writeEndElement(); //creator
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeAbstractElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.description.equals(fieldDTO.getTypeName())) {
+ String descriptionText = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.descriptionText.equals(next.getTypeName())) {
+ descriptionText = next.getSinglePrimitive();
+ }
+ }
+ if (!descriptionText.isEmpty()){
+ xmlw.writeStartElement(dcFlavor+":"+"description");
+ xmlw.writeCharacters(descriptionText);
+ xmlw.writeEndElement(); //description
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeSubjectElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException{
+
+ //Key Words and Subject
+
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.subject.equals(fieldDTO.getTypeName())){
+ for ( String subject : fieldDTO.getMultipleVocab()){
+ xmlw.writeStartElement(dcFlavor+":"+"subject");
+ xmlw.writeCharacters(subject);
+ xmlw.writeEndElement(); //subject
+ }
+ }
+
+ if (DatasetFieldConstant.keyword.equals(fieldDTO.getTypeName())) {
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ String keywordValue = "";
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.keywordValue.equals(next.getTypeName())) {
+ keywordValue = next.getSinglePrimitive();
+ }
+ }
+ if (!keywordValue.isEmpty()){
+ xmlw.writeStartElement(dcFlavor+":"+"subject");
+ xmlw.writeCharacters(keywordValue);
+ xmlw.writeEndElement(); //subject
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeRelPublElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.publication.equals(fieldDTO.getTypeName())) {
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ String pubString = "";
+ String citation = "";
+ String IDType = "";
+ String IDNo = "";
+ String url = "";
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.publicationCitation.equals(next.getTypeName())) {
+ citation = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.publicationIDType.equals(next.getTypeName())) {
+ IDType = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.publicationIDNumber.equals(next.getTypeName())) {
+ IDNo = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.publicationURL.equals(next.getTypeName())) {
+ url = next.getSinglePrimitive();
+ }
+ }
+ pubString = appendCommaSeparatedValue(citation, IDType);
+ pubString = appendCommaSeparatedValue(pubString, IDNo);
+ pubString = appendCommaSeparatedValue(pubString, url);
+ if (!pubString.isEmpty()){
+ xmlw.writeStartElement(dcFlavor+":"+"isReferencedBy");
+ xmlw.writeCharacters(pubString);
+ xmlw.writeEndElement(); //isReferencedBy
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeContributorElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.contributor.equals(fieldDTO.getTypeName())) {
+ String contributorName = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.contributorName.equals(next.getTypeName())) {
+ contributorName = next.getSinglePrimitive();
+ }
+ }
+ if (!contributorName.isEmpty()){
+ xmlw.writeStartElement(dcFlavor+":"+"contributor");
+ xmlw.writeCharacters(contributorName);
+ xmlw.writeEndElement(); //contributor
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeTimeElements(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if ("citation".equals(key)) {
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.timePeriodCovered.equals(fieldDTO.getTypeName())) {
+ String dateValStart = "";
+ String dateValEnd = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) {
+ dateValStart = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) {
+ dateValEnd = next.getSinglePrimitive();
+ }
+ }
+ if (!dateValStart.isEmpty()) {
+ writeFullElement(xmlw, dcFlavor+":"+"temporal", dateValStart);
+ }
+ if (!dateValEnd.isEmpty()) {
+ writeFullElement(xmlw, dcFlavor+":"+"temporal", dateValEnd);
+ }
+ }
+ }
+ if (DatasetFieldConstant.dateOfCollection.equals(fieldDTO.getTypeName())) {
+ String dateValStart = "";
+ String dateValEnd = "";
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) {
+ dateValStart = next.getSinglePrimitive();
+ }
+ if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) {
+ dateValEnd = next.getSinglePrimitive();
+ }
+ }
+ if (!dateValStart.isEmpty()) {
+ writeFullElement(xmlw, dcFlavor+":"+"temporal", dateValStart);
+ }
+ if (!dateValEnd.isEmpty()) {
+ writeFullElement(xmlw, dcFlavor+":"+"temporal", dateValEnd);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static void writeSpatialElements(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String dcFlavor) throws XMLStreamException {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ String key = entry.getKey();
+ MetadataBlockDTO value = entry.getValue();
+ if("geospatial".equals(key)){
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (DatasetFieldConstant.geographicCoverage.equals(fieldDTO.getTypeName())) {
+ for (HashSet foo : fieldDTO.getMultipleCompound()) {
+ for (Iterator iterator = foo.iterator(); iterator.hasNext();) {
+ FieldDTO next = iterator.next();
+ if (DatasetFieldConstant.country.equals(next.getTypeName())) {
+ writeFullElement(xmlw, dcFlavor+":"+"spatial", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.city.equals(next.getTypeName())) {
+ writeFullElement(xmlw, dcFlavor+":"+"spatial", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.state.equals(next.getTypeName())) {
+ writeFullElement(xmlw, dcFlavor+":"+"spatial", next.getSinglePrimitive());
+ }
+ if (DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) {
+ writeFullElement(xmlw, dcFlavor+":"+"spatial", next.getSinglePrimitive());
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private static String appendCommaSeparatedValue(String inVal, String next) {
+ if (!next.isEmpty()) {
+ if (!inVal.isEmpty()) {
+ return inVal + ", " + next;
+ } else {
+ return next;
+ }
+ }
+ return inVal;
+ }
+
+
+ private static String dto2Primitive(DatasetVersionDTO datasetVersionDTO, String datasetFieldTypeName) {
+ for (Map.Entry entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
+ MetadataBlockDTO value = entry.getValue();
+ for (FieldDTO fieldDTO : value.getFields()) {
+ if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) {
+ return fieldDTO.getSinglePrimitive();
+ }
+ }
+ }
+ return null;
+ }
+
+ private static List